/*
 * osfmk/arm64/caches_asm.s (from xnu-6153.11.26)
 */
/*
 * Copyright (c) 2010-2013 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <machine/asm.h>
#include <arm64/proc_reg.h>
#include <arm/pmap.h>
#include <sys/errno.h>
#include "assym.s"

/*
 * void InvalidatePoU_Icache(void)
 *
 * Invalidate the entire instruction cache to the Point of Unification.
 * IC IALLUIS broadcasts the invalidation to the Inner Shareable domain,
 * so all cores observe it.
 */
	.text
	.align 2
	.globl EXT(InvalidatePoU_Icache)
	.globl EXT(invalidate_mmu_icache)
LEXT(InvalidatePoU_Icache)
LEXT(invalidate_mmu_icache)
	dsb		sy						// Ensure prior stores (new code) complete first
	ic		ialluis					// Invalidate icache, all cores, Inner Shareable
	dsb		sy						// Wait for the invalidation to complete
	isb		sy						// Flush the pipeline so fresh code is fetched
L_imi_done:
	ret

/*
 * void InvalidatePoU_IcacheRegion(vm_offset_t va, unsigned length)
 *
 * Invalidate the icache lines covering [va, va + length).  The dcache is
 * first cleaned to the PoU so subsequent icache refills see the new data.
 */
	.text
	.align 2
	.globl EXT(InvalidatePoU_IcacheRegion)
LEXT(InvalidatePoU_IcacheRegion)
	ARM64_STACK_PROLOG
	PUSH_FRAME
	bl		EXT(CleanPoU_DcacheRegion)	// Push new contents out to the PoU
#if __ARM_IC_NOALIAS_ICACHE__
	mov		x9, #((1<<MMU_I_CLINE)-1)
	and		x2, x0, x9					// x2 = va's offset within its cache line
	bic		x0, x0, x9					// Cache line aligned start address
	add		x1, x1, x2					// Grow length by the leading offset
	sub		x1, x1, #1
	lsr		x1, x1, #MMU_I_CLINE		// x1 = (number of cache lines) - 1
L_ipui_loop:
	ic		ivau, x0					// Invalidate icache line by VA to PoU
	add		x0, x0, #1<<MMU_I_CLINE		// Advance to next cache line
	subs	x1, x1, #1					// Decrement cache line counter
	b.pl	L_ipui_loop					// Loop while lines remain
	dsb		sy
	isb		sy
L_ipui_done:
#else
	bl		EXT(InvalidatePoU_Icache)	// No per-line op: invalidate whole icache
#endif
	POP_FRAME
	ARM64_STACK_EPILOG


/*
 * void CleanPoC_Dcache(void)
 *
 * Clean all d-caches to the Point of Coherency, by set/way.
 * On Apple SoCs the caches are fully coherent, so no maintenance is needed.
 */
	.text
	.align 2
	.globl EXT(CleanPoC_Dcache)
	.globl EXT(clean_mmu_dcache)	// NOTE(review): declared global but no LEXT(clean_mmu_dcache) label appears in this file -- confirm it is defined elsewhere
LEXT(CleanPoC_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov		x0, #0							// Set/way operand: set 0, way 0, L1
	mov		x9, #(1 << MMU_I7SET)			// Set-index increment
	mov		x10, #(1 << (MMU_NSET + MMU_I7SET))	// Set-field overflow mask
	mov		x11, #(1 << MMU_I7WAY)			// Way-index increment
	dmb		sy
L_cpcd_dcacheway:
L_cpcd_dcacheline:
	dc		csw, x0							// Clean dcache line by way/set
	add		x0, x0, x9						// Increment set index
	tst		x0, #(1 << (MMU_NSET + MMU_I7SET))	// Look for set overflow
	b.eq	L_cpcd_dcacheline
	bic		x0, x0, x10						// Clear set overflow
	adds	w0, w0, w11						// Increment way
	b.cc	L_cpcd_dcacheway				// Loop until way counter wraps (carry set)
#if __ARM_L2CACHE__
	mov		x0, #2							// Select L2 (level field in bits [3:1])
	mov		x9, #(1 << L2_I7SET)
	mov		x10, #(1 << (L2_NSET + L2_I7SET))
	mov		x11, #(1 << L2_I7WAY)
	dsb		sy
L_cpcd_l2dcacheway:
L_cpcd_l2dcacheline:
	dc		csw, x0							// Clean dcache line by way/set
	add		x0, x0, x9						// Increment set index
	tst		x0, #(1 << (L2_NSET + L2_I7SET))	// Look for set overflow
	b.eq	L_cpcd_l2dcacheline
	bic		x0, x0, x10						// Clear set overflow
	adds	w0, w0, w11						// Increment way
	b.cc	L_cpcd_l2dcacheway				// Loop until way counter wraps
#endif
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb		sy
	ret

/*
 * void CleanPoU_Dcache(void)
 *
 * Clean the D-cache to the Point of Unification, by set/way.
 * On Apple SoCs the caches are fully coherent, so no maintenance is needed.
 */
	.text
	.align 2
	.globl EXT(CleanPoU_Dcache)
LEXT(CleanPoU_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov		x0, #0							// Set/way operand: set 0, way 0, L1
	mov		x9, #(1 << MMU_I7SET)			// Set-index increment
	mov		x10, #(1 << (MMU_NSET + MMU_I7SET))	// Set-field overflow mask
	mov		x11, #(1 << MMU_I7WAY)			// Way-index increment
	dmb		sy
L_cpud_dcacheway:
L_cpud_dcacheline:
	dc		csw, x0							// Clean dcache line by way/set
	add		x0, x0, x9						// Increment set index
	tst		x0, #(1 << (MMU_NSET + MMU_I7SET))	// Look for set overflow
	b.eq	L_cpud_dcacheline
	bic		x0, x0, x10						// Clear set overflow
	adds	w0, w0, w11						// Increment way
	b.cc	L_cpud_dcacheway				// Loop until way counter wraps
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb		sy
	ret

/*
 * void CleanPoU_DcacheRegion(vm_offset_t va, unsigned length)
 *
 * Clean the d-cache lines covering [va, va + length) to the Point of
 * Unification.  Uses x3/x4 as scratch so x0/x1 survive for the caller
 * (InvalidatePoU_IcacheRegion reuses them after the bl).
 */
	.text
	.align 2
	.globl EXT(CleanPoU_DcacheRegion)
LEXT(CleanPoU_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov		x9, #((1<<MMU_CLINE)-1)
	and		x2, x0, x9					// x2 = va's offset within its cache line
	bic		x3, x0, x9					// Cache line aligned start address
	add		x4, x1, x2					// Grow length by the leading offset
	sub		x4, x4, #1
	lsr		x4, x4, #MMU_CLINE			// x4 = (number of cache lines) - 1
	dmb		sy
L_cpudr_loop:
	dc		cvau, x3					// Clean dcache line to PoU
	add		x3, x3, #(1<<MMU_CLINE)		// Advance to next cache line
	subs	x4, x4, #1					// Decrement cache line counter
	b.pl	L_cpudr_loop				// Loop while lines remain
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb		sy
	ret

/*
 * void CleanPoC_DcacheRegion_internal(vm_offset_t va, unsigned length)
 *
 * Clean the d-cache lines covering [va, va + length) to the Point of
 * Coherency.  File-local worker shared by the CleanPoC_DcacheRegion*
 * entry points.  Clobbers x0, x1, x2, x9 only.
 */
	.text
	.align 2
LEXT(CleanPoC_DcacheRegion_internal)
	mov		x9, #((1<<MMU_CLINE)-1)
	and		x2, x0, x9					// x2 = va's offset within its cache line
	bic		x0, x0, x9					// Cache line aligned start address
	add		x1, x1, x2					// Grow length by the leading offset
	sub		x1, x1, #1
	lsr		x1, x1, #MMU_CLINE			// x1 = (number of cache lines) - 1
	dsb		sy
L_cpcdr_loop:
#if defined(APPLE_ARM64_ARCH_FAMILY)
	// It may be tempting to clean the cache (dc cvac),
	// but see Cyclone UM 5.3.8.3 -- it's always a NOP on Cyclone.
	//
	// Clean & Invalidate, however, will work as long as HID4.DisDCMvaOps isn't set.
	dc		civac, x0					// Clean & Invalidate dcache line to PoC
#else
	dc		cvac, x0					// Clean dcache line to PoC
#endif
	add		x0, x0, #(1<<MMU_CLINE)		// Advance to next cache line
	subs	x1, x1, #1					// Decrement cache line counter
	b.pl	L_cpcdr_loop				// Loop while lines remain
	dsb		sy
	ret

/*
 * void CleanPoC_DcacheRegion(vm_offset_t va, unsigned length)
 *
 * Clean d-cache region to the Point of Coherency.  On fully coherent
 * Apple SoCs only a barrier is required; otherwise tail-call the
 * common worker.
 */
	.text
	.align 2
	.globl EXT(CleanPoC_DcacheRegion)
LEXT(CleanPoC_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
	dsb		sy
	ret
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	b		EXT(CleanPoC_DcacheRegion_internal)	// Tail call; args pass through in x0/x1
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */

/*
 * void CleanPoC_DcacheRegion_Force_nopreempt(vm_offset_t va, unsigned length)
 *
 * Force a clean to the Point of Coherency even on coherent Apple SoCs by
 * temporarily clearing HID4/EHID4.DisDCMvaOps so that dc civac actually
 * operates.  Caller must have preemption disabled (the HID4 state is
 * per-core and must not migrate mid-sequence).
 */
	.text
	.align 2
	.globl EXT(CleanPoC_DcacheRegion_Force_nopreempt)
LEXT(CleanPoC_DcacheRegion_Force_nopreempt)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	ARM64_STACK_PROLOG
	PUSH_FRAME
	isb		sy
	ARM64_IS_PCORE x15					// x15 = running on a P-core? (selects EHID4 vs HID4)
	ARM64_READ_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
	and		x14, x14, (~ARM64_REG_HID4_DisDcMVAOps)	// Enable dc-by-VA ops
	ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
	isb		sy
	// x14/x15 live across this call: CleanPoC_DcacheRegion_internal
	// clobbers only x0, x1, x2, x9 (see above).
	bl		EXT(CleanPoC_DcacheRegion_internal)
	isb		sy
	// NOTE(review): restore path sets DisDcMVAOps unconditionally rather
	// than restoring the previously read value -- presumably the bit is
	// always set in normal operation; confirm before reuse elsewhere.
	orr		x14, x14, ARM64_REG_HID4_DisDcMVAOps
	ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
	isb		sy
	POP_FRAME
	ARM64_STACK_EPILOG
#else
	b		EXT(CleanPoC_DcacheRegion_internal)	// Non-Apple: plain clean suffices
#endif // APPLE_ARM64_ARCH_FAMILY

/*
 * void CleanPoC_DcacheRegion_Force(vm_offset_t va, unsigned length)
 *
 * Clean d-cache region to Point of Coherency - when you really
 * need to flush even on coherent platforms, e.g. panic log.
 * Brackets the no-preempt worker with disable/enable preemption;
 * x0/x1 are preserved across _disable_preemption on the stack.
 */
	.text
	.align 2
	.globl EXT(CleanPoC_DcacheRegion_Force)
LEXT(CleanPoC_DcacheRegion_Force)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	ARM64_STACK_PROLOG
	PUSH_FRAME
	stp		x0, x1, [sp, #-16]!			// Save args (clobbered by the call below)
	bl		EXT(_disable_preemption)
	ldp		x0, x1, [sp], #16			// Restore args for the worker
	bl		EXT(CleanPoC_DcacheRegion_Force_nopreempt)
	bl		EXT(_enable_preemption)
	POP_FRAME
	ARM64_STACK_EPILOG
#else
	b		EXT(CleanPoC_DcacheRegion_internal)	// Non-Apple: plain clean suffices
#endif // APPLE_ARM64_ARCH_FAMILY

/*
 * void FlushPoC_Dcache(void)
 *
 * Clean and invalidate all d-caches to the Point of Coherency, by set/way.
 * On Apple SoCs the caches are fully coherent, so no maintenance is needed.
 */
	.text
	.align 2
	.globl EXT(FlushPoC_Dcache)
LEXT(FlushPoC_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov		x0, #0							// Set/way operand: set 0, way 0, L1
	mov		x9, #(1 << MMU_I7SET)			// Set-index increment
	mov		x10, #(1 << (MMU_NSET + MMU_I7SET))	// Set-field overflow mask
	mov		x11, #(1 << MMU_I7WAY)			// Way-index increment
	dmb		sy
L_fpcd_dcacheway:
L_fpcd_dcacheline:
	dc		cisw, x0						// Clean/invalidate dcache line by way/set
	add		x0, x0, x9						// Increment set index
	tst		x0, #(1 << (MMU_NSET + MMU_I7SET))	// Look for set overflow
	b.eq	L_fpcd_dcacheline
	bic		x0, x0, x10						// Clear set overflow
	adds	w0, w0, w11						// Increment way
	b.cc	L_fpcd_dcacheway				// Loop until way counter wraps
#if __ARM_L2CACHE__
	dsb		sy								// L1 maintenance complete before L2
	mov		x0, #2							// Select L2 (level field in bits [3:1])
	mov		x9, #(1 << L2_I7SET)
	mov		x10, #(1 << (L2_NSET + L2_I7SET))
	mov		x11, #(1 << L2_I7WAY)
L_fpcd_l2dcacheway:
L_fpcd_l2dcacheline:
	dc		cisw, x0						// Clean/invalidate dcache line by way/set
	add		x0, x0, x9						// Increment set index
	tst		x0, #(1 << (L2_NSET + L2_I7SET))	// Look for set overflow
	b.eq	L_fpcd_l2dcacheline
	bic		x0, x0, x10						// Clear set overflow
	adds	w0, w0, w11						// Increment way
	b.cc	L_fpcd_l2dcacheway				// Loop until way counter wraps
#endif
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb		sy
	ret

/*
 * void FlushPoU_Dcache(void)
 *
 * Clean and invalidate the D-cache to the Point of Unification, by set/way.
 * On Apple SoCs the caches are fully coherent, so no maintenance is needed.
 */
	.text
	.align 2
	.globl EXT(FlushPoU_Dcache)
LEXT(FlushPoU_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov		x0, #0							// Set/way operand: set 0, way 0, L1
	mov		x9, #(1 << MMU_I7SET)			// Set-index increment
	mov		x10, #(1 << (MMU_NSET + MMU_I7SET))	// Set-field overflow mask
	mov		x11, #(1 << MMU_I7WAY)			// Way-index increment
	dmb		sy
L_fpud_way:
L_fpud_line:
	dc		cisw, x0						// Clean/invalidate dcache line by way/set
	add		x0, x0, x9						// Increment set index
	tst		x0, #(1 << (MMU_NSET + MMU_I7SET))	// Look for set overflow
	b.eq	L_fpud_line
	bic		x0, x0, x10						// Clear set overflow
	adds	w0, w0, w11						// Increment way
	b.cc	L_fpud_way						// Loop until way counter wraps
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb		sy
	ret

/*
 * void FlushPoC_DcacheRegion(vm_offset_t va, unsigned length)
 *
 * Clean and invalidate the d-cache lines covering [va, va + length)
 * to the Point of Coherency.
 */
	.text
	.align 2
	.globl EXT(FlushPoC_DcacheRegion)
LEXT(FlushPoC_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov		x9, #((1<<MMU_CLINE)-1)
	and		x2, x0, x9					// x2 = va's offset within its cache line
	bic		x0, x0, x9					// Cache line aligned start address
	add		x1, x1, x2					// Grow length by the leading offset
	sub		x1, x1, #1
	lsr		x1, x1, #MMU_CLINE			// x1 = (number of cache lines) - 1
	dmb		sy
L_fpcdr_loop:
	dc		civac, x0					// Clean/invalidate dcache line to PoC
	add		x0, x0, #(1<<MMU_CLINE)		// Advance to next cache line
	subs	x1, x1, #1					// Decrement cache line counter
	b.pl	L_fpcdr_loop				// Loop while lines remain
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb		sy
	ret

/*
 * void flush_dcache64(addr64_t addr, unsigned length, boolean_t phys)
 *
 * 64-bit-address entry point; tail-calls the flush_dcache worker
 * (defined elsewhere) with arguments passed through unchanged.
 */
	.text
	.align 2
	.globl EXT(flush_dcache64)
LEXT(flush_dcache64)
	BRANCH_EXTERN flush_dcache

/*
 * void clean_dcache64(addr64_t addr, unsigned length, boolean_t phys)
 *
 * 64-bit-address entry point; tail-calls the clean_dcache worker
 * (defined elsewhere) with arguments passed through unchanged.
 */
	.text
	.align 2
	.globl EXT(clean_dcache64)
LEXT(clean_dcache64)
	BRANCH_EXTERN clean_dcache

/*
 * void invalidate_icache(vm_offset_t va, unsigned length, boolean_t phys)
 * void invalidate_icache64(addr64_t va, unsigned length, boolean_t phys)
 *
 * Invalidate the icache over [va, va + length).  If `phys` is nonzero,
 * the address is physical and is translated to its kernel virtual
 * equivalent via the physmap: va = pa - gPhysBase + gVirtBase.
 */
	.text
	.align 2
	.globl EXT(invalidate_icache64)
	.globl EXT(invalidate_icache)
LEXT(invalidate_icache64)
LEXT(invalidate_icache)
	cmp		w2, #0						// Is it physical?
	b.eq	Lcall_invalidate_worker		// No: address is already virtual
	adrp	x2, _gPhysBase@page			// Translate phys -> kernel virtual:
	add		x2, x2, _gPhysBase@pageoff
	ldr		x2, [x2]
	sub		x0, x0, x2					//   x0 = pa - gPhysBase
	adrp	x2, _gVirtBase@page
	add		x2, x2, _gVirtBase@pageoff
	ldr		x2, [x2]
	add		x0, x0, x2					//   x0 += gVirtBase
Lcall_invalidate_worker:
	b		EXT(InvalidatePoU_IcacheRegion)	// Tail call; x0/x1 pass through


/* vim: set ts=4: */