/*
 * osfmk/arm64/caches_asm.s (apple/xnu, xnu-4903.270.47)
 */
/*
 * Copyright (c) 2010-2013 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */
28
#include <machine/asm.h>
#include <arm64/proc_reg.h>
#include <arm/pmap.h>
#include <sys/errno.h>
#include "assym.s"

/*
 *	void InvalidatePoU_Icache(void)
 *
 *	Invalidate the entire i-cache to the Point of Unification, for all
 *	PEs in the Inner Shareable domain (ic ialluis).
 *	Clobbers: flags only (no GPRs modified).
 */
	.text
	.align 2
	.globl EXT(InvalidatePoU_Icache)
	.globl EXT(invalidate_mmu_icache)
LEXT(InvalidatePoU_Icache)
LEXT(invalidate_mmu_icache)
	ic		ialluis							// Invalidate icache, all PEs, Inner Shareable, to PoU
	dsb		sy								// Wait for the invalidate to complete
	isb		sy								// Flush the pipeline so no stale fetches survive
	ret

/*
 *	void InvalidatePoU_IcacheRegion(vm_offset_t va, unsigned length)
 *
 *	Invalidate an icache region by VA to the Point of Unification.
 *	In:  x0 = start virtual address, x1 = length in bytes
 *	Clobbers: x0, x1, x2, x9
 */
	.text
	.align 2
	.globl EXT(InvalidatePoU_IcacheRegion)
LEXT(InvalidatePoU_IcacheRegion)
	mov		x9, #((1<<MMU_I_CLINE)-1)		// x9 = icache-line offset mask
	and		x2, x0, x9						// x2 = va's offset within its cache line
	bic		x0, x0, x9						// Cache-line align the start address
	add		x1, x1, x2						// Widen length to cover the alignment slop
	sub		x1, x1, #1
	lsr		x1, x1, #MMU_I_CLINE			// Set cache line counter (lines - 1)
L_ipui_loop:
	ic		ivau, x0						// Invalidate icache line by VA to PoU
	add		x0, x0, #1<<MMU_I_CLINE			// Advance to next cache-line-aligned addr
	subs	x1, x1, #1						// Decrement cache line counter
	b.pl	L_ipui_loop						// Loop while counter is non-negative
	dsb		sy								// Wait for the invalidates to complete
	isb		sy								// Resynchronize the instruction stream
	ret


/*
 *	void CleanPoC_Dcache(void)
 *
 *	Clean all d-caches to the Point of Coherency via set/way operations.
 *	On Apple cores this is a no-op: the platform is treated as fully
 *	coherent, so only the trailing dsb is executed.
 *	Clobbers (non-Apple path): x0, x9, x10, x11, flags
 */
	.text
	.align 2
	.globl EXT(CleanPoC_Dcache)
	.globl EXT(clean_mmu_dcache)			// NOTE(review): declared global but no label in this file -- confirm defined elsewhere
LEXT(CleanPoC_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov		x0, #0							// set/way operand: set 0, way 0, level L1
	mov		x9, #(1 << MMU_I7SET)			// set-index increment
	mov		x10, #(1 << (MMU_NSET + MMU_I7SET))	// set-field overflow bit
	mov		x11, #(1 << MMU_I7WAY)			// way-index increment
L_cpcd_dcacheway:
L_cpcd_dcacheline:
	dc		csw, x0							// clean dcache line by way/set
	add		x0, x0, x9						// increment set index
	tst		x0, #(1 << (MMU_NSET + MMU_I7SET))	// look for set overflow
	b.eq	L_cpcd_dcacheline
	bic		x0, x0, x10						// clear set overflow
	adds	x0, x0, x11						// increment way
	b.cc	L_cpcd_dcacheway				// loop until way index wraps (carry set)
#if __ARM_L2CACHE__
	mov		x0, #2							// set/way operand: level field selects L2
	mov		x9, #(1 << L2_I7SET)			// set-index increment
	mov		x10, #(1 << (L2_NSET + L2_I7SET))	// set-field overflow bit
	mov		x11, #(1 << L2_I7WAY)			// way-index increment
L_cpcd_l2dcacheway:
L_cpcd_l2dcacheline:
	dc		csw, x0							// clean dcache line by way/set
	add		x0, x0, x9						// increment set index
	tst		x0, #(1 << (L2_NSET + L2_I7SET))	// look for set overflow
	b.eq	L_cpcd_l2dcacheline
	bic		x0, x0, x10						// clear set overflow
	adds	x0, x0, x11						// increment way
	b.cc	L_cpcd_l2dcacheway				// loop until way index wraps (carry set)
#endif
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb		sy								// ensure all cleans have completed
	ret

/*
 *	void CleanPoU_Dcache(void)
 *
 *	Clean the D-cache to the Point of Unification via set/way operations.
 *	On Apple cores this is a no-op: the platform is treated as fully
 *	coherent, so only the trailing dsb is executed.
 *	Clobbers (non-Apple path): x0, x9, x10, x11, flags
 */
	.text
	.align 2
	.globl EXT(CleanPoU_Dcache)
LEXT(CleanPoU_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov		x0, #0							// set/way operand: set 0, way 0, level L1
	mov		x9, #(1 << MMU_I7SET)			// set-index increment
	mov		x10, #(1 << (MMU_NSET + MMU_I7SET))	// set-field overflow bit
	mov		x11, #(1 << MMU_I7WAY)			// way-index increment
L_cpud_dcacheway:
L_cpud_dcacheline:
	dc		csw, x0							// clean dcache line by way/set
	add		x0, x0, x9						// increment set index
	tst		x0, #(1 << (MMU_NSET + MMU_I7SET))	// look for set overflow
	b.eq	L_cpud_dcacheline
	bic		x0, x0, x10						// clear set overflow
	adds	x0, x0, x11						// increment way
	b.cc	L_cpud_dcacheway				// loop until way index wraps (carry set)
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb		sy								// ensure all cleans have completed
	ret

/*
 *	void CleanPoU_DcacheRegion(vm_offset_t va, unsigned length)
 *
 *	Clean a d-cache region by VA to the Point of Unification.
 *	On Apple cores the clean loop is compiled out (fully coherent);
 *	only the trailing dsb is executed.
 *	In:  x0 = start virtual address, x1 = length in bytes
 *	Clobbers (non-Apple path): x0, x1, x2, x9, flags
 */
	.text
	.align 2
	.globl EXT(CleanPoU_DcacheRegion)
LEXT(CleanPoU_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov		x9, #((1<<MMU_CLINE)-1)			// x9 = dcache-line offset mask
	and		x2, x0, x9						// x2 = va's offset within its cache line
	bic		x0, x0, x9						// Cache-line align the start address
	add		x1, x1, x2						// Widen length to cover the alignment slop
	sub		x1, x1, #1
	lsr		x1, x1, #MMU_CLINE				// Set cache line counter (lines - 1)
L_cpudr_loop:
	dc		cvau, x0						// Clean dcache line to PoU
	add		x0, x0, #(1<<MMU_CLINE)			// Advance to next cache-line-aligned addr
	subs	x1, x1, #1						// Decrement cache line counter
	b.pl	L_cpudr_loop					// Loop while counter is non-negative
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb		sy								// ensure all cleans have completed
	ret

/*
 *	void CleanPoC_DcacheRegion_internal(vm_offset_t va, unsigned length)
 *
 *	Clean a d-cache region by VA to the Point of Coherency.
 *	Local worker (no .globl); reached via the CleanPoC_DcacheRegion* entry
 *	points. On Apple cores uses clean+invalidate instead of clean alone
 *	(see comment in loop).
 *	In:  x0 = start virtual address, x1 = length in bytes
 *	Clobbers: x0, x1, x2, x9, flags
 */
	.text
	.align 2
LEXT(CleanPoC_DcacheRegion_internal)
	ARM64_STACK_PROLOG
	PUSH_FRAME
	mov		x9, #((1<<MMU_CLINE)-1)			// x9 = dcache-line offset mask
	and		x2, x0, x9						// x2 = va's offset within its cache line
	bic		x0, x0, x9						// Cache-line align the start address
	add		x1, x1, x2						// Widen length to cover the alignment slop
	sub		x1, x1, #1
	lsr		x1, x1, #MMU_CLINE				// Set cache line counter (lines - 1)
	dsb		sy								// Complete prior accesses before maintenance
L_cpcdr_loop:
#if defined(APPLE_ARM64_ARCH_FAMILY)
	// It may be tempting to clean the cache (dc cvac),
	// but see Cyclone UM 5.3.8.3 -- it's always a NOP on Cyclone.
	//
	// Clean & Invalidate, however, will work as long as HID4.DisDCMvaOps isn't set.
	dc		civac, x0						// Clean & Invalidate dcache line to PoC
#else
	dc		cvac, x0						// Clean dcache line to PoC
#endif
	add		x0, x0, #(1<<MMU_CLINE)			// Advance to next cache-line-aligned addr
	subs	x1, x1, #1						// Decrement cache line counter
	b.pl	L_cpcdr_loop					// Loop while counter is non-negative
	dsb		sy								// Ensure all cleans have completed
	POP_FRAME
	ARM64_STACK_EPILOG

/*
 *	void CleanPoC_DcacheRegion(vm_offset_t va, unsigned length)
 *
 *	Clean a d-cache region to the Point of Coherency.
 *	Apple cores are treated as fully coherent: just a dsb.
 *	Otherwise tail-calls the internal worker (args pass through in x0/x1).
 */
	.text
	.align 2
	.globl EXT(CleanPoC_DcacheRegion)
LEXT(CleanPoC_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
	dsb		sy
	ret
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	b		EXT(CleanPoC_DcacheRegion_internal)	// tail call; x0 = va, x1 = length
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */

/*
 *	void CleanPoC_DcacheRegion_Force_nopreempt(vm_offset_t va, unsigned length)
 *
 *	Forced clean of a d-cache region to PoC on Apple cores: temporarily
 *	clears HID4/EHID4.DisDcMVAOps (which would make DC-by-VA ops NOPs)
 *	around the clean. Expected to run with preemption already disabled --
 *	see CleanPoC_DcacheRegion_Force, which wraps this call in
 *	_disable_preemption/_enable_preemption.
 *	In:  x0 = start virtual address, x1 = length in bytes
 *	Clobbers: x0, x1, x2, x9, x14, x15, flags
 */
	.text
	.align 2
	.globl EXT(CleanPoC_DcacheRegion_Force_nopreempt)
LEXT(CleanPoC_DcacheRegion_Force_nopreempt)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	ARM64_STACK_PROLOG
	PUSH_FRAME
	isb		sy
	ARM64_IS_PCORE x15						// x15 selects EHID4 (E-core) vs HID4 (P-core) below
	ARM64_READ_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
	and		x14, x14, (~ARM64_REG_HID4_DisDcMVAOps)	// enable DC-by-VA ops so civac takes effect
	ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
	isb		sy
	bl		EXT(CleanPoC_DcacheRegion_internal)
	isb		sy
	orr		x14, x14, ARM64_REG_HID4_DisDcMVAOps	// re-set the bit (assumed set on entry -- NOTE(review): restored unconditionally)
	ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
	isb		sy
	POP_FRAME
	ARM64_STACK_EPILOG
#else
	b		EXT(CleanPoC_DcacheRegion_internal)	// tail call; x0 = va, x1 = length
#endif // APPLE_ARM64_ARCH_FAMILY

/*
 *	void CleanPoC_DcacheRegion_Force(vm_offset_t va, unsigned length)
 *
 *	Clean d-cache region to Point of Coherency - when you really
 *	need to flush even on coherent platforms, e.g. panic log.
 *	Disables preemption around the forced clean so the HID4 fiddling in
 *	the _nopreempt worker stays on one CPU.
 *	In:  x0 = start virtual address, x1 = length in bytes
 */
	.text
	.align 2
	.globl EXT(CleanPoC_DcacheRegion_Force)
LEXT(CleanPoC_DcacheRegion_Force)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	ARM64_STACK_PROLOG
	PUSH_FRAME
	stp		x0, x1, [sp, #-16]!				// preserve va/length across _disable_preemption
	bl		EXT(_disable_preemption)
	ldp		x0, x1, [sp], #16				// restore va/length for the worker
	bl		EXT(CleanPoC_DcacheRegion_Force_nopreempt)
	bl		EXT(_enable_preemption)
	POP_FRAME
	ARM64_STACK_EPILOG
#else
	b		EXT(CleanPoC_DcacheRegion_internal)	// tail call; x0 = va, x1 = length
#endif // APPLE_ARM64_ARCH_FAMILY

/*
 *	void FlushPoC_Dcache(void)
 *
 *	Clean and invalidate all d-caches to the Point of Coherency via
 *	set/way operations. On Apple cores this is a no-op (fully coherent);
 *	only the trailing dsb is executed.
 *	Clobbers (non-Apple path): x0, x9, x10, x11, flags
 */
	.text
	.align 2
	.globl EXT(FlushPoC_Dcache)
LEXT(FlushPoC_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov		x0, #0							// set/way operand: set 0, way 0, level L1
	mov		x9, #(1 << MMU_I7SET)			// set-index increment
	mov		x10, #(1 << (MMU_NSET + MMU_I7SET))	// set-field overflow bit
	mov		x11, #(1 << MMU_I7WAY)			// way-index increment
L_fpcd_dcacheway:
L_fpcd_dcacheline:
	dc		cisw, x0						// clean & invalidate dcache line by way/set
	add		x0, x0, x9						// increment set index
	tst		x0, #(1 << (MMU_NSET + MMU_I7SET))	// look for set overflow
	b.eq	L_fpcd_dcacheline
	bic		x0, x0, x10						// clear set overflow
	adds	x0, x0, x11						// increment way
	b.cc	L_fpcd_dcacheway				// loop until way index wraps (carry set)
#if __ARM_L2CACHE__
	mov		x0, #2							// set/way operand: level field selects L2
	mov		x9, #(1 << L2_I7SET)			// set-index increment
	mov		x10, #(1 << (L2_NSET + L2_I7SET))	// set-field overflow bit
	mov		x11, #(1 << L2_I7WAY)			// way-index increment
L_fpcd_l2dcacheway:
L_fpcd_l2dcacheline:
	dc		cisw, x0						// clean & invalidate dcache line by way/set
	add		x0, x0, x9						// increment set index
	tst		x0, #(1 << (L2_NSET + L2_I7SET))	// look for set overflow
	b.eq	L_fpcd_l2dcacheline
	bic		x0, x0, x10						// clear set overflow
	adds	x0, x0, x11						// increment way
	b.cc	L_fpcd_l2dcacheway				// loop until way index wraps (carry set)
#endif
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb		sy								// ensure all maintenance has completed
	ret

/*
 *	void FlushPoU_Dcache(void)
 *
 *	Clean and invalidate the D-cache to the Point of Unification via
 *	set/way operations. On Apple cores this is a no-op (fully coherent);
 *	only the trailing dsb is executed.
 *	Clobbers (non-Apple path): x0, x9, x10, x11, flags
 */
	.text
	.align 2
	.globl EXT(FlushPoU_Dcache)
LEXT(FlushPoU_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov		x0, #0							// set/way operand: set 0, way 0, level L1
	mov		x9, #(1 << MMU_I7SET)			// set-index increment
	mov		x10, #(1 << (MMU_NSET + MMU_I7SET))	// set-field overflow bit
	mov		x11, #(1 << MMU_I7WAY)			// way-index increment
L_fpud_way:
L_fpud_line:
	dc		cisw, x0						// clean & invalidate dcache line by way/set
	add		x0, x0, x9						// increment set index
	tst		x0, #(1 << (MMU_NSET + MMU_I7SET))	// look for set overflow
	b.eq	L_fpud_line
	bic		x0, x0, x10						// clear set overflow
	adds	x0, x0, x11						// increment way
	b.cc	L_fpud_way						// loop until way index wraps (carry set)
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb		sy								// ensure all maintenance has completed
	ret

/*
 *	void FlushPoC_DcacheRegion(vm_offset_t va, unsigned length)
 *
 *	Clean and invalidate a d-cache region by VA to the Point of
 *	Coherency. On Apple cores the loop is compiled out (fully coherent);
 *	only the trailing dsb is executed.
 *	In:  x0 = start virtual address, x1 = length in bytes
 *	Clobbers (non-Apple path): x0, x1, x2, x9, flags
 */
	.text
	.align 2
	.globl EXT(FlushPoC_DcacheRegion)
LEXT(FlushPoC_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov		x9, #((1<<MMU_CLINE)-1)			// x9 = dcache-line offset mask
	and		x2, x0, x9						// x2 = va's offset within its cache line
	bic		x0, x0, x9						// Cache-line align the start address
	add		x1, x1, x2						// Widen length to cover the alignment slop
	sub		x1, x1, #1
	lsr		x1, x1, #MMU_CLINE				// Set cache line counter (lines - 1)
L_fpcdr_loop:
	dc		civac, x0						// Clean & invalidate dcache line to PoC
	add		x0, x0, #(1<<MMU_CLINE)			// Advance to next cache-line-aligned addr
	subs	x1, x1, #1						// Decrement cache line counter
	b.pl	L_fpcdr_loop					// Loop while counter is non-negative
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb		sy								// ensure all maintenance has completed
	ret

/*
 *	void flush_dcache64(addr64_t addr, unsigned length, boolean_t phys)
 *
 *	64-bit entry point: tail-branches to flush_dcache with all
 *	arguments passed through unchanged (x0/x1/w2).
 */
	.text
	.align 2
	.globl EXT(flush_dcache64)
LEXT(flush_dcache64)
	BRANCH_EXTERN flush_dcache				// tail call; args pass through

/*
 *	void clean_dcache64(addr64_t addr, unsigned length, boolean_t phys)
 *
 *	64-bit entry point: tail-branches to clean_dcache with all
 *	arguments passed through unchanged (x0/x1/w2).
 */
	.text
	.align 2
	.globl EXT(clean_dcache64)
LEXT(clean_dcache64)
	BRANCH_EXTERN clean_dcache				// tail call; args pass through

/*
 *	void invalidate_icache(vm_offset_t va, unsigned length, boolean_t phys)
 *	void invalidate_icache64(addr64_t va, unsigned length, boolean_t phys)
 *
 *	Invalidate the icache for a region. If phys (w2) is non-zero the
 *	address is physical: it is rebased from the physical aperture into
 *	kernel virtual space (va = pa - gPhysBase + gVirtBase) before the
 *	VA-based invalidate.
 *	In:  x0 = address, x1 = length in bytes, w2 = phys flag
 *	Clobbers: x0, x1, x2, x9 (via InvalidatePoU_IcacheRegion), flags
 */
	.text
	.align 2
	.globl EXT(invalidate_icache64)
	.globl EXT(invalidate_icache)
LEXT(invalidate_icache64)
LEXT(invalidate_icache)
	cmp		w2, #0							// Is the address physical?
	b.eq	Lcall_invalidate_worker			// No: already a virtual address
	adrp	x2, _gPhysBase@page				// x2 = &gPhysBase
	add		x2, x2, _gPhysBase@pageoff
	ldr		x2, [x2]						// x2 = gPhysBase
	sub		x0, x0, x2						// x0 = pa - gPhysBase (offset)
	adrp	x2, _gVirtBase@page				// x2 = &gVirtBase
	add		x2, x2, _gVirtBase@pageoff
	ldr		x2, [x2]						// x2 = gVirtBase
	add		x0, x0, x2						// x0 = gVirtBase + offset (kernel va)
Lcall_invalidate_worker:
	b		EXT(InvalidatePoU_IcacheRegion)	// tail call; x0 = va, x1 = length


/* vim: set ts=4: */