/*
 * Copyright (c) 2010-2013 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <machine/asm.h>
#include <arm64/proc_reg.h>
#include <arm/pmap.h>
#include <sys/errno.h>
#include "assym.s"

/*
 * void InvalidatePoU_Icache(void)
 *
 * Invalidate i-cache
 */
    .text
    .align 2
    .globl EXT(InvalidatePoU_Icache)
    .globl EXT(invalidate_mmu_icache)
LEXT(InvalidatePoU_Icache)
LEXT(invalidate_mmu_icache)
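    // "ic ialluis" invalidates all instruction caches to PoU across the
    // Inner Shareable domain; the leading dsb makes prior stores visible
    // first, and the trailing dsb/isb discard already-fetched instructions.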
    dsb     sy
    ic      ialluis                  // Invalidate icache
    dsb     sy
    isb     sy
L_imi_done:
    ret

/*
 * void InvalidatePoU_IcacheRegion(vm_offset_t va, unsigned length)
 *
 * Invalidate icache region
 */
    .text
    .align 2
    .globl EXT(InvalidatePoU_IcacheRegion)
LEXT(InvalidatePoU_IcacheRegion)
    ARM64_STACK_PROLOG
    PUSH_FRAME
    bl      EXT(CleanPoU_DcacheRegion)
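    // Data must reach the PoU before the icache is invalidated so that
    // refetched lines observe the new instructions. CleanPoU_DcacheRegion
    // preserves x0/x1 (it works in x2-x4/x9, or is a no-op on fully
    // coherent Apple cores), so va/length are still live below.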
#if __ARM_IC_NOALIAS_ICACHE__
    mov     x9, #((1<<MMU_I_CLINE)-1)
    and     x2, x0, x9
    bic     x0, x0, x9               // Cache-line align address
    add     x1, x1, x2
    sub     x1, x1, #1
    lsr     x1, x1, #MMU_I_CLINE     // Set cache line counter
L_ipui_loop:
    ic      ivau, x0                 // Invalidate icache line
    add     x0, x0, #1<<MMU_I_CLINE  // Get next cache-aligned addr
    subs    x1, x1, #1               // Decrement cache line counter
    b.pl    L_ipui_loop              // Loop while counter is non-negative
    dsb     sy
    isb     sy
L_ipui_done:
#else
    bl      EXT(InvalidatePoU_Icache)
#endif
    POP_FRAME
    ARM64_STACK_EPILOG

/*
 * Obtains cache physical layout information required for way/set
 * data cache maintenance operations.
 *
 * $0: Data cache level, starting from 0
 * $1: Output register for set increment
 * $2: Output register for last valid set
 * $3: Output register for way increment
 */
.macro GET_CACHE_CONFIG
    lsl     $0, $0, #1
    msr     CSSELR_EL1, $0           // Select appropriate cache
    isb                              // Synchronize context

    mrs     $0, CCSIDR_EL1
    ubfx    $1, $0, #3, #10          // extract number of ways - 1
    mov     $2, $1
    add     $1, $1, #1               // calculate number of ways

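    // Compute ceil(log2(#ways)) via clz: start from 63, bump to 64 when
    // #ways is not a power of two ((#ways & (#ways - 1)) != 0), then
    // subtract the leading-zero count of #ways.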
    mov     $0, #63
    and     $2, $2, $1
    cmp     $2, #0
    cinc    $0, $0, ne
    clz     $1, $1
    sub     $0, $0, $1               // $0 = ceil(log2(number of ways))

    mov     $1, #32                  // calculate way increment
    sub     $3, $1, $0
    mov     $1, #1
    lsl     $3, $1, $3               // $3 = 1 << (32 - ceil(log2(ways)))
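    // The way index occupies the top bits of the 32-bit set/way operand
    // (ARM ARM, DC CSW), so an "adds" of this increment sets the carry
    // flag exactly when every way has been visited.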

    mrs     $0, CCSIDR_EL1
    ubfx    $1, $0, #0, #3           // extract log2(line size) - 4
    add     $1, $1, #4               // calculate log2(line size)
    mov     $2, #1
    lsl     $1, $2, $1               // calculate set increment

    ubfx    $2, $0, #13, #15         // extract number of sets - 1
    add     $2, $2, #1               // calculate number of sets
    mul     $2, $1, $2               // calculate last valid set
.endmacro

/*
 * Detects the presence of an L2 cache and returns 1 if implemented,
 * zero otherwise.
 *
 * $0: Output register
 */
.macro HAS_L2_CACHE
    mrs     $0, CLIDR_EL1
    ubfx    $0, $0, #3, #3           // extract L2 cache Ctype
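    // Ctype values: 0 = no cache, 1 = instruction-only; anything greater
    // implies a data or unified L2 cache.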
    cmp     $0, #0x1
    cset    $0, hi
.endmacro

/*
 * void CleanPoC_Dcache(void)
 *
 * Clean all d-caches
 */
    .text
    .align 2
    .globl EXT(CleanPoC_Dcache)
    .globl EXT(clean_mmu_dcache)
LEXT(CleanPoC_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
    /* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
    mov     x0, #0
    GET_CACHE_CONFIG x0, x9, x10, x11

    dmb     sy
    mov     x0, #0
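    // Walk every line at this level: the set index advances in the low
    // bits until it passes the last valid set, then the way field (kept
    // in the top bits of w0) is bumped; a carry out of the 32-bit add
    // means every way has been visited.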
L_cpcd_dcacheway:
L_cpcd_dcacheline:
    dc      csw, x0                  // clean dcache line by way/set
    add     x0, x0, x9               // increment set index
    tst     x0, x10                  // look for overflow
    b.eq    L_cpcd_dcacheline
    bic     x0, x0, x10              // clear set overflow
    adds    w0, w0, w11              // increment way
    b.cc    L_cpcd_dcacheway         // loop

    HAS_L2_CACHE x0
    cbz     x0, L_cpcd_skipl2dcache
    mov     x0, #1                   // select L2 (level index 1)
    GET_CACHE_CONFIG x0, x9, x10, x11

    dsb     sy
    mov     x0, #2                   // set/way operand: level field (bits [3:1]) = 1, i.e. L2
L_cpcd_l2dcacheway:
L_cpcd_l2dcacheline:
    dc      csw, x0                  // clean dcache line by way/set
    add     x0, x0, x9               // increment set index
    tst     x0, x10                  // look for overflow
    b.eq    L_cpcd_l2dcacheline
    bic     x0, x0, x10              // clear set overflow
    adds    w0, w0, w11              // increment way
    b.cc    L_cpcd_l2dcacheway       // loop
L_cpcd_skipl2dcache:
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
    dsb     sy
    ret

/*
 * void CleanPoU_Dcache(void)
 *
 * Clean D-cache to Point of Unification
 */
    .text
    .align 2
    .globl EXT(CleanPoU_Dcache)
LEXT(CleanPoU_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
    /* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
    mov     x0, #0
    GET_CACHE_CONFIG x0, x9, x10, x11

    dmb     sy
    mov     x0, #0
L_cpud_dcacheway:
L_cpud_dcacheline:
    dc      csw, x0                  // clean dcache line by way/set
    add     x0, x0, x9               // increment set index
    tst     x0, x10                  // look for overflow
    b.eq    L_cpud_dcacheline
    bic     x0, x0, x10              // clear set overflow
    adds    w0, w0, w11              // increment way
    b.cc    L_cpud_dcacheway         // loop
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
    dsb     sy
    ret

/*
 * void CleanPoU_DcacheRegion(vm_offset_t va, unsigned length)
 *
 * Clean d-cache region to Point of Unification
 */
    .text
    .align 2
    .globl EXT(CleanPoU_DcacheRegion)
LEXT(CleanPoU_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
    /* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
    mov     x9, #((1<<MMU_CLINE)-1)
    and     x2, x0, x9
    bic     x3, x0, x9               // Cache-line align address (x0/x1 left intact)
    add     x4, x1, x2
    sub     x4, x4, #1
    lsr     x4, x4, #MMU_CLINE       // Set cache line counter
    dmb     sy
L_cpudr_loop:
    dc      cvau, x3                 // Clean dcache line to PoU
    add     x3, x3, #(1<<MMU_CLINE)  // Get next cache-aligned addr
    subs    x4, x4, #1               // Decrement cache line counter
    b.pl    L_cpudr_loop             // Loop while counter is non-negative
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
    dsb     sy
    ret

/*
 * void CleanPoC_DcacheRegion_internal(vm_offset_t va, size_t length)
 *
 * Clean d-cache region to Point of Coherency
 */
    .text
    .align 2
LEXT(CleanPoC_DcacheRegion_internal)
    mov     x9, #((1<<MMU_CLINE)-1)
    and     x2, x0, x9
    bic     x0, x0, x9               // Cache-line align address
    add     x1, x1, x2
    sub     x1, x1, #1
    lsr     x1, x1, #MMU_CLINE       // Set cache line counter
    dsb     sy
L_cpcdr_loop:
#if defined(APPLE_ARM64_ARCH_FAMILY)
    // It may be tempting to clean the cache (dc cvac),
    // but see Cyclone UM 5.3.8.3 -- it's always a NOP on Cyclone.
    //
    // Clean & Invalidate, however, will work as long as HID4.DisDCMvaOps isn't set.
    dc      civac, x0                // Clean & Invalidate dcache line to PoC
#else
    dc      cvac, x0                 // Clean dcache line to PoC
#endif
    add     x0, x0, #(1<<MMU_CLINE)  // Get next cache-aligned addr
    subs    x1, x1, #1               // Decrement cache line counter
    b.pl    L_cpcdr_loop             // Loop while counter is non-negative
    dsb     sy
    ret

/*
 * void CleanPoC_DcacheRegion(vm_offset_t va, size_t length)
 *
 * Clean d-cache region to Point of Coherency
 */
    .text
    .align 2
    .globl EXT(CleanPoC_DcacheRegion)
LEXT(CleanPoC_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
    /* "Fully Coherent." */
    dsb     sy
    ret
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
    b       EXT(CleanPoC_DcacheRegion_internal)
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */

    .text
    .align 2
    .globl EXT(CleanPoC_DcacheRegion_Force_nopreempt)
LEXT(CleanPoC_DcacheRegion_Force_nopreempt)
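    // On Apple cores, "dc cvac"-style MVA ops can be disabled by
    // HID4.DisDcMVAOps (see the Cyclone UM note in
    // CleanPoC_DcacheRegion_internal), so temporarily clear that bit
    // around the clean, then restore it.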
#if defined(APPLE_ARM64_ARCH_FAMILY)
    ARM64_STACK_PROLOG
    PUSH_FRAME
    isb     sy
    ARM64_IS_PCORE x15
    ARM64_READ_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
    and     x14, x14, (~ARM64_REG_HID4_DisDcMVAOps)
    ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
    isb     sy
    bl      EXT(CleanPoC_DcacheRegion_internal) // touches only x0-x2/x9, so x14/x15 survive
    isb     sy
    orr     x14, x14, ARM64_REG_HID4_DisDcMVAOps
    ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
    isb     sy
    POP_FRAME
    ARM64_STACK_EPILOG
#else
    b       EXT(CleanPoC_DcacheRegion_internal)
#endif // APPLE_ARM64_ARCH_FAMILY

/*
 * void CleanPoC_DcacheRegion_Force(vm_offset_t va, size_t length)
 *
 * Clean d-cache region to Point of Coherency - when you really
 * need to flush even on coherent platforms, e.g. panic log
 */
    .text
    .align 2
    .globl EXT(CleanPoC_DcacheRegion_Force)
LEXT(CleanPoC_DcacheRegion_Force)
#if defined(APPLE_ARM64_ARCH_FAMILY)
    ARM64_STACK_PROLOG
    PUSH_FRAME
    stp     x0, x1, [sp, #-16]!      // va/length are caller-saved; preserve them across the call
    bl      EXT(_disable_preemption)
    ldp     x0, x1, [sp], #16        // restore va/length
    bl      EXT(CleanPoC_DcacheRegion_Force_nopreempt)
    bl      EXT(_enable_preemption)
    POP_FRAME
    ARM64_STACK_EPILOG
#else
    b       EXT(CleanPoC_DcacheRegion_internal)
#endif // APPLE_ARM64_ARCH_FAMILY

/*
 * void FlushPoC_Dcache(void)
 *
 * Clean and Invalidate dcaches to Point of Coherency
 */
    .text
    .align 2
    .globl EXT(FlushPoC_Dcache)
LEXT(FlushPoC_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
    /* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
    mov     x0, #0
    GET_CACHE_CONFIG x0, x9, x10, x11

    dmb     sy
    mov     x0, #0
L_fpcd_dcacheway:
L_fpcd_dcacheline:
    dc      cisw, x0                 // clean invalidate dcache line by way/set
    add     x0, x0, x9               // increment set index
    tst     x0, x10                  // look for overflow
    b.eq    L_fpcd_dcacheline
    bic     x0, x0, x10              // clear set overflow
    adds    w0, w0, w11              // increment way
    b.cc    L_fpcd_dcacheway         // loop

    HAS_L2_CACHE x0
    cbz     x0, L_fpcd_skipl2dcache
    dsb     sy
    mov     x0, #1                   // select L2 (level index 1)
    GET_CACHE_CONFIG x0, x9, x10, x11

    mov     x0, #2                   // set/way operand: level field (bits [3:1]) = 1, i.e. L2
L_fpcd_l2dcacheway:
L_fpcd_l2dcacheline:
    dc      cisw, x0                 // clean invalidate dcache line by way/set
    add     x0, x0, x9               // increment set index
    tst     x0, x10                  // look for overflow
    b.eq    L_fpcd_l2dcacheline
    bic     x0, x0, x10              // clear set overflow
    adds    w0, w0, w11              // increment way
    b.cc    L_fpcd_l2dcacheway       // loop
L_fpcd_skipl2dcache:
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
    dsb     sy
    ret

/*
 * void FlushPoU_Dcache(void)
 *
 * Flush D-cache to Point of Unification
 */
    .text
    .align 2
    .globl EXT(FlushPoU_Dcache)
LEXT(FlushPoU_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
    /* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
    mov     x0, #0
    GET_CACHE_CONFIG x0, x9, x10, x11

    dmb     sy
    mov     x0, #0
L_fpud_way:
L_fpud_line:
    dc      cisw, x0                 // clean invalidate dcache line by way/set
    add     x0, x0, x9               // increment set index
    tst     x0, x10                  // look for overflow
    b.eq    L_fpud_line
    bic     x0, x0, x10              // clear set overflow
    adds    w0, w0, w11              // increment way
    b.cc    L_fpud_way               // loop
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
    dsb     sy
    ret

/*
 * void FlushPoC_DcacheRegion(vm_offset_t va, unsigned length)
 *
 * Clean and Invalidate d-cache region to Point of Coherency
 */
    .text
    .align 2
    .globl EXT(FlushPoC_DcacheRegion)
LEXT(FlushPoC_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
    /* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
    mov     x9, #((1<<MMU_CLINE)-1)
    and     x2, x0, x9
    bic     x0, x0, x9               // Cache-line align address
    add     x1, x1, x2
    sub     x1, x1, #1
    lsr     x1, x1, #MMU_CLINE       // Set cache line counter
    dmb     sy
L_fpcdr_loop:
    dc      civac, x0                // Clean invalidate dcache line to PoC
    add     x0, x0, #(1<<MMU_CLINE)  // Get next cache-aligned addr
    subs    x1, x1, #1               // Decrement cache line counter
    b.pl    L_fpcdr_loop             // Loop while counter is non-negative
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
    dsb     sy
    ret

/*
 * void flush_dcache64(addr64_t addr, unsigned length, boolean_t phys)
 */
    .text
    .align 2
    .globl EXT(flush_dcache64)
LEXT(flush_dcache64)
    BRANCH_EXTERN flush_dcache

/*
 * void clean_dcache64(addr64_t addr, unsigned length, boolean_t phys)
 */
    .text
    .align 2
    .globl EXT(clean_dcache64)
LEXT(clean_dcache64)
    BRANCH_EXTERN clean_dcache

/*
 * void invalidate_icache(vm_offset_t va, unsigned length, boolean_t phys)
 * void invalidate_icache64(addr64_t va, unsigned length, boolean_t phys)
 */
    .text
    .align 2
    .globl EXT(invalidate_icache64)
    .globl EXT(invalidate_icache)
LEXT(invalidate_icache64)
LEXT(invalidate_icache)
    cmp     w2, #0                   // Is it physical?
    b.eq    Lcall_invalidate_worker
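    // Physical address: rebase onto the kernel's static virtual mapping,
    // va = pa - gPhysBase + gVirtBase.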
    adrp    x2, _gPhysBase@page
    add     x2, x2, _gPhysBase@pageoff
    ldr     x2, [x2]
    sub     x0, x0, x2
    adrp    x2, _gVirtBase@page
    add     x2, x2, _gVirtBase@pageoff
    ldr     x2, [x2]
    add     x0, x0, x2
Lcall_invalidate_worker:
    b       EXT(InvalidatePoU_IcacheRegion)


/* vim: set ts=4: */