/*
 * Copyright (c) 2010-2013 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */
#include <machine/asm.h>
#include <arm64/proc_reg.h>
#include <sys/errno.h>
/*
 * void InvalidatePoU_Icache(void)
 *
 * Invalidate instruction cache to the Point of Unification
 */
	.text
	.align 2
	.globl EXT(InvalidatePoU_Icache)
	.globl EXT(invalidate_mmu_icache)
LEXT(InvalidatePoU_Icache)
LEXT(invalidate_mmu_icache)
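	// IC IALLUIS invalidates all instruction caches to the PoU and
	// broadcasts the operation across the Inner Shareable domain, so
	// every core discards stale instructions.  The DSB waits for the
	// invalidate to complete; the ISB discards anything this core has
	// already prefetched.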
	ic	ialluis				// Invalidate icache
	dsb	sy
	isb	sy
	ret
/*
 * void InvalidatePoU_IcacheRegion(vm_offset_t va, unsigned length)
 *
 * Invalidate icache region
 */
	.text
	.align 2
	.globl EXT(InvalidatePoU_IcacheRegion)
LEXT(InvalidatePoU_IcacheRegion)
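	// Round the base address down to a cache-line boundary and grow
	// the length by the bytes dropped, so the loop below covers every
	// line touched by the original [va, va + length) range.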
	mov	x9, #((1<<MMU_I_CLINE)-1)
	and	x2, x0, x9			// Offset within the cache line
	bic	x0, x0, x9			// Cache-aligned base address
	add	x1, x1, x2			// Add alignment slop to the length
	lsr	x1, x1, #MMU_I_CLINE		// Set cache line counter
L_ipui_loop:
	ic	ivau, x0			// Invalidate icache line to PoU
	add	x0, x0, #1<<MMU_I_CLINE		// Get next cache aligned addr
	subs	x1, x1, #1			// Decrement cache line counter
	b.pl	L_ipui_loop			// Loop while counter is non-negative
	dsb	sy
	isb	sy
	ret
/*
 * void CleanPoC_Dcache(void)
 *
 * Clean all d-caches to Point of Coherency
 */
	.text
	.align 2
	.globl EXT(CleanPoC_Dcache)
	.globl EXT(clean_mmu_dcache)
LEXT(CleanPoC_Dcache)
LEXT(clean_mmu_dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
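	// Set/way walk: x0 is the DC CSW operand -- cache level in bits
	// [3:1], set index starting at bit MMU_I7SET, way index starting
	// at bit MMU_I7WAY.  The inner loop steps the set index until it
	// overflows; the outer loop then advances the way until the way
	// counter wraps (carry set).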
	mov	x0, #0					// L1 (level field 0), set 0, way 0
	mov	x9, #(1 << MMU_I7SET)
	mov	x10, #(1 << (MMU_NSET + MMU_I7SET))
	mov	x11, #(1 << MMU_I7WAY)
L_cpcd_dcacheway:
L_cpcd_dcacheline:
	dc	csw, x0					// clean dcache line by way/set
	add	x0, x0, x9				// increment set index
	tst	x0, #(1 << (MMU_NSET + MMU_I7SET))	// look for overflow
	b.eq	L_cpcd_dcacheline
	bic	x0, x0, x10				// clear set overflow
	adds	x0, x0, x11				// increment way
	b.cc	L_cpcd_dcacheway			// loop
	mov	x0, #2					// L2 (level field 1), set 0, way 0
	mov	x9, #(1 << L2_I7SET)
	mov	x10, #(1 << (L2_NSET + L2_I7SET))
	mov	x11, #(1 << L2_I7WAY)
L_cpcd_l2dcacheway:
L_cpcd_l2dcacheline:
	dc	csw, x0					// clean dcache line by way/set
	add	x0, x0, x9				// increment set index
	tst	x0, #(1 << (L2_NSET + L2_I7SET))	// look for overflow
	b.eq	L_cpcd_l2dcacheline
	bic	x0, x0, x10				// clear set overflow
	adds	x0, x0, x11				// increment way
	b.cc	L_cpcd_l2dcacheway			// loop
	dsb	sy
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	ret
/*
 * void CleanPoU_Dcache(void)
 *
 * Clean D-cache to Point of Unification
 */
	.text
	.align 2
	.globl EXT(CleanPoU_Dcache)
LEXT(CleanPoU_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov	x0, #0					// L1 (level field 0), set 0, way 0
	mov	x9, #(1 << MMU_I7SET)
	mov	x10, #(1 << (MMU_NSET + MMU_I7SET))
	mov	x11, #(1 << MMU_I7WAY)
L_cpud_dcacheway:
L_cpud_dcacheline:
	dc	csw, x0					// clean dcache line by way/set
	add	x0, x0, x9				// increment set index
	tst	x0, #(1 << (MMU_NSET + MMU_I7SET))	// look for overflow
	b.eq	L_cpud_dcacheline
	bic	x0, x0, x10				// clear set overflow
	adds	x0, x0, x11				// increment way
	b.cc	L_cpud_dcacheway			// loop
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb	sy
	ret
/*
 * void CleanPoU_DcacheRegion(vm_offset_t va, unsigned length)
 *
 * Clean d-cache region to Point of Unification
 */
	.text
	.align 2
	.globl EXT(CleanPoU_DcacheRegion)
LEXT(CleanPoU_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov	x9, #((1<<MMU_CLINE)-1)
	and	x2, x0, x9			// Offset within the cache line
	bic	x0, x0, x9			// Cache-aligned base address
	add	x1, x1, x2			// Add alignment slop to the length
	lsr	x1, x1, #MMU_CLINE		// Set cache line counter
L_cpudr_loop:
	dc	cvau, x0			// Clean dcache line to PoU
	add	x0, x0, #(1<<MMU_CLINE)		// Get next cache aligned addr
	subs	x1, x1, #1			// Decrement cache line counter
	b.pl	L_cpudr_loop			// Loop while counter is non-negative
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb	sy
	ret
/*
 * void CleanPoC_DcacheRegion_internal(vm_offset_t va, unsigned length)
 *
 * Clean d-cache region to Point of Coherency
 */
	.text
	.align 2
LEXT(CleanPoC_DcacheRegion_internal)
	mov	x9, #((1<<MMU_CLINE)-1)
	and	x2, x0, x9			// Offset within the cache line
	bic	x0, x0, x9			// Cache-aligned base address
	add	x1, x1, x2			// Add alignment slop to the length
	lsr	x1, x1, #MMU_CLINE		// Set cache line counter
L_cpcdr_loop:
#if defined(APPLE_ARM64_ARCH_FAMILY)
	// It may be tempting to clean the cache (dc cvac),
	// but see Cyclone UM 5.3.8.3 -- it's always a NOP on Cyclone.
	//
	// Clean & Invalidate, however, will work as long as
	// HID4.DisDCMvaOps isn't set.
	dc	civac, x0			// Clean & Invalidate dcache line to PoC
#else
	dc	cvac, x0			// Clean dcache line to PoC
#endif
	add	x0, x0, #(1<<MMU_CLINE)		// Get next cache aligned addr
	subs	x1, x1, #1			// Decrement cache line counter
	b.pl	L_cpcdr_loop			// Loop while counter is non-negative
	dsb	sy
	ret
/*
 * void CleanPoC_DcacheRegion(vm_offset_t va, unsigned length)
 *
 * Clean d-cache region to Point of Coherency
 */
	.text
	.align 2
	.globl EXT(CleanPoC_DcacheRegion)
LEXT(CleanPoC_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
	dsb	sy
	ret
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	b	EXT(CleanPoC_DcacheRegion_internal)
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	.text
	.align 2
	.globl EXT(CleanPoC_DcacheRegion_Force_nopreempt)
LEXT(CleanPoC_DcacheRegion_Force_nopreempt)
#if defined(APPLE_ARM64_ARCH_FAMILY)
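	// On Apple cores, HID4.DisDCMvaOps turns DC MVA clean operations
	// into NOPs (see the note in CleanPoC_DcacheRegion_internal).
	// Clear the bit (in EHID4 or HID4, depending on core type) for
	// the duration of the clean so it actually takes effect, then
	// restore it.  This per-core register dance is why callers must
	// keep preemption disabled here.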
	ARM64_STACK_PROLOG
	PUSH_FRAME
	isb	sy
	ARM64_IS_PCORE x15			// x15 <- 1 if running on a P-core
	ARM64_READ_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
	and	x14, x14, (~ARM64_REG_HID4_DisDcMVAOps)
	ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
	isb	sy
	bl	EXT(CleanPoC_DcacheRegion_internal)
	isb	sy
	orr	x14, x14, ARM64_REG_HID4_DisDcMVAOps
	ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
	isb	sy
	POP_FRAME
	ARM64_STACK_EPILOG
#else
	b	EXT(CleanPoC_DcacheRegion_internal)
#endif // APPLE_ARM64_ARCH_FAMILY
/*
 * void CleanPoC_DcacheRegion_Force(vm_offset_t va, unsigned length)
 *
 * Clean d-cache region to Point of Coherency - when you really
 * need to flush even on coherent platforms, e.g. panic log
 */
	.text
	.align 2
	.globl EXT(CleanPoC_DcacheRegion_Force)
LEXT(CleanPoC_DcacheRegion_Force)
#if defined(APPLE_ARM64_ARCH_FAMILY)
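	// The HID4 fixup in the _nopreempt variant is per-core state, so
	// the thread must not migrate while the clean is in flight;
	// bracket the call with disable/enable_preemption.  va/length in
	// x0/x1 are preserved across the first call on the stack.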
	ARM64_STACK_PROLOG
	PUSH_FRAME
	stp	x0, x1, [sp, #-16]!		// Save va/length
	bl	EXT(_disable_preemption)
	ldp	x0, x1, [sp], #16		// Restore va/length
	bl	EXT(CleanPoC_DcacheRegion_Force_nopreempt)
	bl	EXT(_enable_preemption)
	POP_FRAME
	ARM64_STACK_EPILOG
#else
	b	EXT(CleanPoC_DcacheRegion_internal)
#endif // APPLE_ARM64_ARCH_FAMILY
/*
 * void FlushPoC_Dcache(void)
 *
 * Clean and Invalidate dcaches to Point of Coherency
 */
	.text
	.align 2
	.globl EXT(FlushPoC_Dcache)
LEXT(FlushPoC_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov	x0, #0					// L1 (level field 0), set 0, way 0
	mov	x9, #(1 << MMU_I7SET)
	mov	x10, #(1 << (MMU_NSET + MMU_I7SET))
	mov	x11, #(1 << MMU_I7WAY)
L_fpcd_dcacheway:
L_fpcd_dcacheline:
	dc	cisw, x0				// clean invalidate dcache line by way/set
	add	x0, x0, x9				// increment set index
	tst	x0, #(1 << (MMU_NSET + MMU_I7SET))	// look for overflow
	b.eq	L_fpcd_dcacheline
	bic	x0, x0, x10				// clear set overflow
	adds	x0, x0, x11				// increment way
	b.cc	L_fpcd_dcacheway			// loop
	mov	x0, #2					// L2 (level field 1), set 0, way 0
	mov	x9, #(1 << L2_I7SET)
	mov	x10, #(1 << (L2_NSET + L2_I7SET))
	mov	x11, #(1 << L2_I7WAY)
L_fpcd_l2dcacheway:
L_fpcd_l2dcacheline:
	dc	cisw, x0				// clean invalidate dcache line by way/set
	add	x0, x0, x9				// increment set index
	tst	x0, #(1 << (L2_NSET + L2_I7SET))	// look for overflow
	b.eq	L_fpcd_l2dcacheline
	bic	x0, x0, x10				// clear set overflow
	adds	x0, x0, x11				// increment way
	b.cc	L_fpcd_l2dcacheway			// loop
	dsb	sy
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	ret
/*
 * void FlushPoU_Dcache(void)
 *
 * Flush D-cache to Point of Unification
 */
	.text
	.align 2
	.globl EXT(FlushPoU_Dcache)
LEXT(FlushPoU_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov	x0, #0					// L1 (level field 0), set 0, way 0
	mov	x9, #(1 << MMU_I7SET)
	mov	x10, #(1 << (MMU_NSET + MMU_I7SET))
	mov	x11, #(1 << MMU_I7WAY)
L_fpud_way:
L_fpud_line:
	dc	cisw, x0				// clean invalidate dcache line by way/set
	add	x0, x0, x9				// increment set index
	tst	x0, #(1 << (MMU_NSET + MMU_I7SET))	// look for overflow
	b.eq	L_fpud_line
	bic	x0, x0, x10				// clear set overflow
	adds	x0, x0, x11				// increment way
	b.cc	L_fpud_way				// loop
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb	sy
	ret
/*
 * void FlushPoC_DcacheRegion(vm_offset_t va, unsigned length)
 *
 * Clean and Invalidate d-cache region to Point of Coherency
 */
	.text
	.align 2
	.globl EXT(FlushPoC_DcacheRegion)
LEXT(FlushPoC_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov	x9, #((1<<MMU_CLINE)-1)
	and	x2, x0, x9			// Offset within the cache line
	bic	x0, x0, x9			// Cache-aligned base address
	add	x1, x1, x2			// Add alignment slop to the length
	lsr	x1, x1, #MMU_CLINE		// Set cache line counter
L_fpcdr_loop:
	dc	civac, x0			// Clean invalidate dcache line to PoC
	add	x0, x0, #(1<<MMU_CLINE)		// Get next cache aligned addr
	subs	x1, x1, #1			// Decrement cache line counter
	b.pl	L_fpcdr_loop			// Loop while counter is non-negative
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb	sy
	ret
/*
 * void flush_dcache64(addr64_t addr, unsigned length, boolean_t phys)
 *
 * Clean and Invalidate dcache region
 */
	.globl EXT(flush_dcache64)
LEXT(flush_dcache64)
	BRANCH_EXTERN flush_dcache		// Tail-call the shared worker

/*
 * void clean_dcache64(addr64_t addr, unsigned length, boolean_t phys)
 *
 * Clean dcache region
 */
	.globl EXT(clean_dcache64)
LEXT(clean_dcache64)
	BRANCH_EXTERN clean_dcache		// Tail-call the shared worker
/*
 * void invalidate_icache(vm_offset_t va, unsigned length, boolean_t phys)
 * void invalidate_icache64(addr64_t va, unsigned length, boolean_t phys)
 *
 * Invalidate icache region
 */
	.globl EXT(invalidate_icache64)
	.globl EXT(invalidate_icache)
LEXT(invalidate_icache64)
LEXT(invalidate_icache)
	cmp	w2, #0				// Is it physical?
	b.eq	Lcall_invalidate_worker		// No: va is already virtual
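	// The worker operates on virtual addresses, so translate a
	// physical address into the kernel's view of it:
	// va = pa - gPhysBase + gVirtBase.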
	adrp	x2, _gPhysBase@page
	add	x2, x2, _gPhysBase@pageoff
	ldr	x2, [x2]
	sub	x0, x0, x2			// Subtract the physical base
	adrp	x2, _gVirtBase@page
	add	x2, x2, _gVirtBase@pageoff
	ldr	x2, [x2]
	add	x0, x0, x2			// Add the virtual base
Lcall_invalidate_worker:
	b	EXT(InvalidatePoU_IcacheRegion)