/*
 * Source: osfmk/arm64/caches_asm.s (Apple XNU, xnu-4570.61.1)
 * Retrieved via git.saurik.com mirror of apple/xnu.git
 */
1 /*
2 * Copyright (c) 2010-2013 Apple Inc. All rights reserved.
3 *
4 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. The rights granted to you under the License
10 * may not be used to create, or enable the creation or redistribution of,
11 * unlawful or unlicensed copies of an Apple operating system, or to
12 * circumvent, violate, or enable the circumvention or violation of, any
13 * terms of an Apple operating system software license agreement.
14 *
15 * Please obtain a copy of the License at
16 * http://www.opensource.apple.com/apsl/ and read it before using this file.
17 *
18 * The Original Code and all software distributed under the License are
19 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
20 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
21 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
22 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
23 * Please see the License for the specific language governing rights and
24 * limitations under the License.
25 *
26 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
27 */
28
29 #include <machine/asm.h>
30 #include <arm64/proc_reg.h>
31 #include <arm/pmap.h>
32 #include <sys/errno.h>
33 #include "assym.s"
34
35 /*
36 * void InvalidatePoU_Icache(void)
37 *
38 * Invalidate i-cache
 *
 * Invalidates all instruction caches to the Point of Unification,
 * broadcast to the Inner Shareable domain (IC IALLUIS), then
 * synchronizes with DSB/ISB so subsequent instruction fetches see
 * the effect. Also exported under the alias invalidate_mmu_icache.
 * Uses no general-purpose registers.
39 */
40 .text
41 .align 2
42 .globl EXT(InvalidatePoU_Icache)
43 .globl EXT(invalidate_mmu_icache)
44 LEXT(InvalidatePoU_Icache)
45 LEXT(invalidate_mmu_icache)
46 ic ialluis // Invalidate all icaches to PoU, Inner Shareable
47 dsb sy // Wait for the invalidate to complete
48 isb sy // Resynchronize the instruction stream
49 ret
50
51 /*
52 * void InvalidatePoU_IcacheRegion(vm_offset_t va, unsigned length)
53 *
54 * Invalidate icache region
 *
 * Invalidates each icache line covering [va, va + length) to the
 * Point of Unification, by VA (IC IVAU), then synchronizes.
 * In:  x0 = va (any alignment), x1 = length in bytes
 * Clobbers: x0, x1, x2, x9, flags
55 */
56 .text
57 .align 2
58 .globl EXT(InvalidatePoU_IcacheRegion)
59 LEXT(InvalidatePoU_IcacheRegion)
60 mov x9, #((1<<MMU_I_CLINE)-1) // x9 = icache line-size mask
61 and x2, x0, x9 // x2 = va's offset within its cache line
62 bic x0, x0, x9 // Align va down to a cache-line boundary
63 add x1, x1, x2 // Extend length to cover the leading offset
64 sub x1, x1, #1
65 lsr x1, x1, #MMU_I_CLINE // x1 = (number of lines to invalidate) - 1
66 L_ipui_loop:
67 ic ivau, x0 // Invalidate this icache line to PoU, by VA
68 add x0, x0, #1<<MMU_I_CLINE // Advance to the next cache line
69 subs x1, x1, #1 // Decrement line counter
70 b.pl L_ipui_loop // Loop while counter >= 0 (runs counter+1 times)
71 dsb sy // Wait for invalidates to complete
72 isb sy // Resynchronize the instruction stream
73 ret
74
75
76 /*
77 * void CleanPoC_Dcache(void)
78 *
79 * Clean all d-caches
 *
 * Cleans the entire data cache to the Point of Coherency using
 * set/way operations (DC CSW). On Apple cores the hierarchy is
 * hardware-coherent, so only the DSB barrier is performed.
 * The DC CSW operand packs level / set / way fields; the
 * MMU_I7SET / MMU_I7WAY (and L2_*) shifts from assym.s position
 * the set and way indices within that operand.
 * NOTE(review): .globl declares clean_mmu_dcache but no LEXT label
 * defines it in this block — verify the alias is defined elsewhere.
80 */
81 .text
82 .align 2
83 .globl EXT(CleanPoC_Dcache)
84 .globl EXT(clean_mmu_dcache)
85 LEXT(CleanPoC_Dcache)
86 #if defined(APPLE_ARM64_ARCH_FAMILY)
87 /* "Fully Coherent." */
88 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
89 mov x0, #0 // start at level 0 (L1), set 0, way 0
90 mov x9, #(1 << MMU_I7SET) // set-index increment
91 mov x10, #(1 << (MMU_NSET + MMU_I7SET)) // set-field overflow bit
92 mov x11, #(1 << MMU_I7WAY) // way-index increment
93 L_cpcd_dcacheway:
94 L_cpcd_dcacheline:
95 dc csw, x0 // clean dcache line by way/set
96 add x0, x0, x9 // increment set index
97 tst x0, #(1 << (MMU_NSET + MMU_I7SET)) // look for set overflow
98 b.eq L_cpcd_dcacheline // next set in this way
99 bic x0, x0, x10 // clear set overflow
100 adds x0, x0, x11 // increment way (carry set when ways wrap)
101 b.cc L_cpcd_dcacheway // loop until all ways done
102 #if __ARM_L2CACHE__
103 mov x0, #2 // level field bits[3:1] = 1 selects the L2 cache
104 mov x9, #(1 << L2_I7SET) // L2 set-index increment
105 mov x10, #(1 << (L2_NSET + L2_I7SET)) // L2 set-field overflow bit
106 mov x11, #(1 << L2_I7WAY) // L2 way-index increment
107 L_cpcd_l2dcacheway:
108 L_cpcd_l2dcacheline:
109 dc csw, x0 // clean dcache line by way/set
110 add x0, x0, x9 // increment set index
111 tst x0, #(1 << (L2_NSET + L2_I7SET)) // look for set overflow
112 b.eq L_cpcd_l2dcacheline // next set in this way
113 bic x0, x0, x10 // clear set overflow
114 adds x0, x0, x11 // increment way
115 b.cc L_cpcd_l2dcacheway // loop until all ways done
116 #endif
117 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
118 dsb sy // ensure all cleans have completed
119 ret
120
121 /*
122 * void CleanPoU_Dcache(void)
123 *
124 * Clean D-cache to Point of Unification
 *
 * On Apple cores the caches are hardware-coherent, so no cache
 * maintenance is required — only a DSB barrier is issued.
 * Non-Apple ARM64 targets deliberately fail to build (#error)
 * until an implementation is provided.
125 */
126 .text
127 .align 2
128 .globl EXT(CleanPoU_Dcache)
129 LEXT(CleanPoU_Dcache)
130 #if defined(APPLE_ARM64_ARCH_FAMILY)
131 /* "Fully Coherent." */
132 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
133 #error CleanPoU_Dcache needs an implementation
134 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
135 dsb sy // barrier only; coherent hardware needs no clean
136 ret
137
138 /*
139 * void CleanPoU_DcacheRegion(vm_offset_t va, unsigned length)
140 *
141 * Clean d-cache region to Point of Unification
 *
 * On Apple cores (hardware-coherent) this degenerates to a DSB.
 * Otherwise cleans each dcache line covering [va, va + length)
 * to the PoU by VA (DC CVAU).
 * In:  x0 = va (any alignment), x1 = length in bytes
 * Clobbers (non-Apple path): x0, x1, x2, x9, flags
142 */
143 .text
144 .align 2
145 .globl EXT(CleanPoU_DcacheRegion)
146 LEXT(CleanPoU_DcacheRegion)
147 #if defined(APPLE_ARM64_ARCH_FAMILY)
148 /* "Fully Coherent." */
149 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
150 mov x9, #((1<<MMU_CLINE)-1) // x9 = dcache line-size mask
151 and x2, x0, x9 // x2 = va's offset within its cache line
152 bic x0, x0, x9 // Align va down to a cache-line boundary
153 add x1, x1, x2 // Extend length to cover the leading offset
154 sub x1, x1, #1
155 lsr x1, x1, #MMU_CLINE // x1 = (number of lines to clean) - 1
156 L_cpudr_loop:
157 dc cvau, x0 // Clean dcache line to PoU, by VA
158 add x0, x0, #(1<<MMU_CLINE) // Advance to the next cache line
159 subs x1, x1, #1 // Decrement line counter
160 b.pl L_cpudr_loop // Loop while counter >= 0
161 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
162 dsb sy // ensure cleans have completed
163 ret
164
165 /*
166 * void CleanPoC_DcacheRegion_internal(vm_offset_t va, unsigned length)
167 *
168 * Clean d-cache region to Point of Coherency
 *
 * File-local worker (no .globl): cleans each dcache line covering
 * [va, va + length) to the PoC by VA. On Apple cores DC CIVAC
 * (clean + invalidate) is used because DC CVAC is a NOP there.
 * In:  x0 = va (any alignment), x1 = length in bytes
 * Clobbers: x0, x1, x2, x9, flags. Deliberately does NOT touch
 * x14/x15 — CleanPoC_DcacheRegion_Force keeps live state in them
 * across its bl to this routine.
169 */
170 .text
171 .align 2
172 LEXT(CleanPoC_DcacheRegion_internal)
173 PUSH_FRAME
174 mov x9, #((1<<MMU_CLINE)-1) // x9 = dcache line-size mask
175 and x2, x0, x9 // x2 = va's offset within its cache line
176 bic x0, x0, x9 // Align va down to a cache-line boundary
177 add x1, x1, x2 // Extend length to cover the leading offset
178 sub x1, x1, #1
179 lsr x1, x1, #MMU_CLINE // x1 = (number of lines to clean) - 1
180 dsb sy // order prior stores before the cache ops
181 L_cpcdr_loop:
182 #if defined(APPLE_ARM64_ARCH_FAMILY)
183 // It may be tempting to clean the cache (dc cvac),
184 // but see Cyclone UM 5.3.8.3 -- it's always a NOP on Cyclone.
185 //
186 // Clean & Invalidate, however, will work as long as HID4.DisDCMvaOps isn't set.
187 dc civac, x0 // Clean & Invalidate dcache line to PoC
188 #else
189 dc cvac, x0 // Clean dcache line to PoC
190 #endif
191 add x0, x0, #(1<<MMU_CLINE) // Advance to the next cache line
192 subs x1, x1, #1 // Decrement line counter
193 b.pl L_cpcdr_loop // Loop while counter >= 0
194 dsb sy // ensure cleans have completed
195 POP_FRAME
196 ret
197
198 /*
199 * void CleanPoC_DcacheRegion(vm_offset_t va, unsigned length)
200 *
201 * Clean d-cache region to Point of Coherency
 *
 * Public entry point. Apple cores are hardware-coherent, so only a
 * DSB is needed; other targets tail-branch to the worker, which
 * consumes x0/x1 directly (no frame set up here).
202 */
203 .text
204 .align 2
205 .globl EXT(CleanPoC_DcacheRegion)
206 LEXT(CleanPoC_DcacheRegion)
207 #if defined(APPLE_ARM64_ARCH_FAMILY)
208 /* "Fully Coherent." */
209 dsb sy // barrier only; no cache maintenance required
210 ret
211 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
212 b EXT(CleanPoC_DcacheRegion_internal) // tail call; args pass through in x0/x1
213 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
214
215 /*
216 * void CleanPoC_DcacheRegion_Force(vm_offset_t va, unsigned length)
217 *
218 * Clean d-cache region to Point of Coherency - when you really
219 * need to flush even on coherent platforms, e.g. panic log
 *
 * On Apple cores, DC ops by VA are gated by HID4.DisDcMVAOps (or the
 * EHID4 equivalent on E-cores). This routine temporarily clears that
 * bit so the clean actually reaches memory, runs the worker, then
 * sets the bit again. Preemption is disabled so the HID4 change and
 * the cache ops stay on the same CPU.
 * Register use: x15 = P-core flag (selects HID4 vs EHID4);
 * x14 = saved HID4/EHID4 contents. Both stay live across the bl to
 * CleanPoC_DcacheRegion_internal, which does not touch them.
 * NOTE(review): the restore path ORs DisDcMVAOps back in
 * unconditionally — this assumes the bit was set on entry; verify.
220 */
221 .text
222 .align 2
223 .globl EXT(CleanPoC_DcacheRegion_Force)
224 LEXT(CleanPoC_DcacheRegion_Force)
225 #if defined(APPLE_ARM64_ARCH_FAMILY)
226 PUSH_FRAME
227 stp x0, x1, [sp, #-16]! // preserve va/length across the C call below
228 bl EXT(_disable_preemption) // pin to this CPU while HID4 is modified
229 isb sy
230 ARM64_IS_PCORE x15 // x15 = nonzero if executing on a P-core
231 ARM64_READ_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4 // x14 = current (E)HID4
232 and x14, x14, (~ARM64_REG_HID4_DisDcMVAOps) // clear DisDcMVAOps: enable DC-by-VA ops
233 ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
234 isb sy // make the HID4 change visible before the cache ops
235 ldp x0, x1, [sp], #16 // restore va/length for the worker
236 bl EXT(CleanPoC_DcacheRegion_internal) // clean the region (preserves x14/x15)
237 isb sy
238 orr x14, x14, ARM64_REG_HID4_DisDcMVAOps // re-set DisDcMVAOps
239 ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
240 isb sy
241 bl EXT(_enable_preemption)
242 POP_FRAME
243 ret
244 #else
245 b EXT(CleanPoC_DcacheRegion_internal) // no HID4 gate; plain tail call
246 #endif // APPLE_ARM64_ARCH_FAMILY
247
248 /*
249 * void FlushPoC_Dcache(void)
250 *
251 * Clean and Invalidate dcaches to Point of Coherency
 *
 * Same set/way walk as CleanPoC_Dcache but uses DC CISW
 * (clean + invalidate) on every line. On Apple cores the hierarchy
 * is hardware-coherent, so only the DSB barrier is performed.
252 */
253 .text
254 .align 2
255 .globl EXT(FlushPoC_Dcache)
256 LEXT(FlushPoC_Dcache)
257 #if defined(APPLE_ARM64_ARCH_FAMILY)
258 /* "Fully Coherent." */
259 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
260 mov x0, #0 // start at level 0 (L1), set 0, way 0
261 mov x9, #(1 << MMU_I7SET) // set-index increment
262 mov x10, #(1 << (MMU_NSET + MMU_I7SET)) // set-field overflow bit
263 mov x11, #(1 << MMU_I7WAY) // way-index increment
264 L_fpcd_dcacheway:
265 L_fpcd_dcacheline:
266 dc cisw, x0 // clean + invalidate dcache line by way/set
267 add x0, x0, x9 // increment set index
268 tst x0, #(1 << (MMU_NSET + MMU_I7SET)) // look for set overflow
269 b.eq L_fpcd_dcacheline // next set in this way
270 bic x0, x0, x10 // clear set overflow
271 adds x0, x0, x11 // increment way (carry set when ways wrap)
272 b.cc L_fpcd_dcacheway // loop until all ways done
273 #if __ARM_L2CACHE__
274 mov x0, #2 // level field bits[3:1] = 1 selects the L2 cache
275 mov x9, #(1 << L2_I7SET) // L2 set-index increment
276 mov x10, #(1 << (L2_NSET + L2_I7SET)) // L2 set-field overflow bit
277 mov x11, #(1 << L2_I7WAY) // L2 way-index increment
278 L_fpcd_l2dcacheway:
279 L_fpcd_l2dcacheline:
280 dc cisw, x0 // clean + invalidate dcache line by way/set
281 add x0, x0, x9 // increment set index
282 tst x0, #(1 << (L2_NSET + L2_I7SET)) // look for set overflow
283 b.eq L_fpcd_l2dcacheline // next set in this way
284 bic x0, x0, x10 // clear set overflow
285 adds x0, x0, x11 // increment way
286 b.cc L_fpcd_l2dcacheway // loop until all ways done
287 #endif
288 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
289 dsb sy // ensure all flushes have completed
290 ret
291
292 /*
293 * void FlushPoU_Dcache(void)
294 *
295 * Flush D-cache to Point of Unification
 *
 * Walks the L1 dcache by set/way with DC CISW (clean + invalidate).
 * On Apple cores (hardware-coherent) only the DSB is performed.
296 */
297 .text
298 .align 2
299 .globl EXT(FlushPoU_Dcache)
300 LEXT(FlushPoU_Dcache)
301 #if defined(APPLE_ARM64_ARCH_FAMILY)
302 /* "Fully Coherent." */
303 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
304 mov x0, #0 // start at level 0 (L1), set 0, way 0
305 mov x9, #(1 << MMU_I7SET) // set-index increment
306 mov x10, #(1 << (MMU_NSET + MMU_I7SET)) // set-field overflow bit
307 mov x11, #(1 << MMU_I7WAY) // way-index increment
308 L_fpud_way:
309 L_fpud_line:
310 dc cisw, x0 // clean + invalidate dcache line by way/set
311 add x0, x0, x9 // increment set index
312 tst x0, #1 << (MMU_NSET + MMU_I7SET) // look for set overflow
313 b.eq L_fpud_line // next set in this way
314 bic x0, x0, x10 // clear set overflow
315 adds x0, x0, x11 // increment way (carry set when ways wrap)
316 b.cc L_fpud_way // loop until all ways done
317 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
318 dsb sy // ensure all flushes have completed
319 ret
320
321 /*
322 * void FlushPoC_DcacheRegion(vm_offset_t va, unsigned length)
323 *
324 * Clean and Invalidate d-cache region to Point of Coherency
 *
 * On Apple cores (hardware-coherent) this degenerates to a DSB.
 * Otherwise cleans + invalidates each dcache line covering
 * [va, va + length) to the PoC by VA (DC CIVAC).
 * In:  x0 = va (any alignment), x1 = length in bytes
 * Clobbers (non-Apple path): x0, x1, x2, x9, flags
325 */
326 .text
327 .align 2
328 .globl EXT(FlushPoC_DcacheRegion)
329 LEXT(FlushPoC_DcacheRegion)
330 #if defined(APPLE_ARM64_ARCH_FAMILY)
331 /* "Fully Coherent." */
332 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
333 mov x9, #((1<<MMU_CLINE)-1) // x9 = dcache line-size mask
334 and x2, x0, x9 // x2 = va's offset within its cache line
335 bic x0, x0, x9 // Align va down to a cache-line boundary
336 add x1, x1, x2 // Extend length to cover the leading offset
337 sub x1, x1, #1
338 lsr x1, x1, #MMU_CLINE // x1 = (number of lines to flush) - 1
339 L_fpcdr_loop:
340 dc civac, x0 // Clean + invalidate dcache line to PoC, by VA
341 add x0, x0, #(1<<MMU_CLINE) // Advance to the next cache line
342 subs x1, x1, #1 // Decrement line counter
343 b.pl L_fpcdr_loop // Loop while counter >= 0
344 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
345 dsb sy // ensure flushes have completed
346 ret
347
348 /*
349 * void flush_dcache64(addr64_t addr, unsigned length, boolean_t phys)
 *
 * 64-bit-address variant: arguments already arrive in x0/x1/x2, so
 * simply tail-branch to flush_dcache (defined elsewhere).
350 */
351 .text
352 .align 2
353 .globl EXT(flush_dcache64)
354 LEXT(flush_dcache64)
355 BRANCH_EXTERN flush_dcache // tail call; args pass through unchanged
356
357 /*
358 * void clean_dcache64(addr64_t addr, unsigned length, boolean_t phys)
 *
 * 64-bit-address variant: arguments already arrive in x0/x1/x2, so
 * simply tail-branch to clean_dcache (defined elsewhere).
359 */
360 .text
361 .align 2
362 .globl EXT(clean_dcache64)
363 LEXT(clean_dcache64)
364 BRANCH_EXTERN clean_dcache // tail call; args pass through unchanged
365
366 /*
367 * void invalidate_icache(vm_offset_t va, unsigned length, boolean_t phys)
368 * void invalidate_icache64(addr64_t va, unsigned length, boolean_t phys)
 *
 * Invalidate the icache lines covering [va, va + length).
 * If phys (w2) is nonzero, va is a physical address and is first
 * rewritten to the kernel's static virtual mapping:
 *   va = pa - gPhysBase + gVirtBase
 * Both entry points are identical on arm64 (addresses are 64-bit
 * either way); the work is tail-called to InvalidatePoU_IcacheRegion.
369 */
370 .text
371 .align 2
372 .globl EXT(invalidate_icache64)
373 .globl EXT(invalidate_icache)
374 LEXT(invalidate_icache64)
375 LEXT(invalidate_icache)
376 cmp w2, #0 // Is the address physical?
377 b.eq Lcall_invalidate_worker // no: x0 is already virtual
378 adrp x2, _gPhysBase@page // x2 = &gPhysBase (page-relative)
379 add x2, x2, _gPhysBase@pageoff
380 ldr x2, [x2] // x2 = gPhysBase
381 sub x0, x0, x2 // x0 = physical offset from base of RAM
382 adrp x2, _gVirtBase@page // x2 = &gVirtBase (page-relative)
383 add x2, x2, _gVirtBase@pageoff
384 ldr x2, [x2] // x2 = gVirtBase
385 add x0, x0, x2 // x0 = kernel virtual address
386 Lcall_invalidate_worker:
387 b EXT(InvalidatePoU_IcacheRegion) // tail call with (va, length) in x0/x1
388
389
390 /* vim: set ts=4: */
388
389
390 /* vim: set ts=4: */