 * Copyright (c) 2010-2013 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
29 #include <machine/asm.h>
30 #include <arm64/proc_reg.h>
32 #include <sys/errno.h>
36 * void InvalidatePoU_Icache(void)
42 .globl EXT(InvalidatePoU_Icache)
43 .globl EXT(invalidate_mmu_icache)
44 LEXT(InvalidatePoU_Icache)
45 LEXT(invalidate_mmu_icache)
46 ic ialluis // Invalidate icache
52 * void InvalidatePoU_IcacheRegion(vm_offset_t va, unsigned length)
54 * Invalidate icache region
58 .globl EXT(InvalidatePoU_IcacheRegion)
59 LEXT(InvalidatePoU_IcacheRegion)
60 mov x9, #((1<<MMU_I_CLINE)-1)
62 bic x0, x0, x9 // Cached aligned
65 lsr x1, x1, #MMU_I_CLINE // Set cache line counter
67 ic ivau, x0 // Invalidate icache line
68 add x0, x0, #1<<MMU_I_CLINE // Get next cache aligned addr
69 subs x1, x1, #1 // Decrementer cache line counter
70 b.pl L_ipui_loop // Loop in counter not null
77 * void CleanPoC_Dcache(void)
83 .globl EXT(CleanPoC_Dcache)
84 .globl EXT(clean_mmu_dcache)
86 #if defined(APPLE_ARM64_ARCH_FAMILY)
87 /* "Fully Coherent." */
88 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
90 mov x9, #(1 << MMU_I7SET)
91 mov x10, #(1 << (MMU_NSET + MMU_I7SET))
92 mov x11, #(1 << MMU_I7WAY)
95 dc csw, x0 // clean dcache line by way/set
96 add x0, x0, x9 // increment set index
97 tst x0, #(1 << (MMU_NSET + MMU_I7SET)) // look for overflow
98 b.eq L_cpcd_dcacheline
99 bic x0, x0, x10 // clear set overflow
100 adds x0, x0, x11 // increment way
101 b.cc L_cpcd_dcacheway // loop
104 mov x9, #(1 << L2_I7SET)
105 mov x10, #(1 << (L2_NSET + L2_I7SET))
106 mov x11, #(1 << L2_I7WAY)
109 dc csw, x0 // clean dcache line by way/set
110 add x0, x0, x9 // increment set index
111 tst x0, #(1 << (L2_NSET + L2_I7SET)) // look for overflow
112 b.eq L_cpcd_l2dcacheline
113 bic x0, x0, x10 // clear set overflow
114 adds x0, x0, x11 // increment way
115 b.cc L_cpcd_l2dcacheway // loop
117 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
122 * void CleanPoU_Dcache(void)
124 * Clean D-cache to Point of Unification
128 .globl EXT(CleanPoU_Dcache)
129 LEXT(CleanPoU_Dcache)
130 #if defined(APPLE_ARM64_ARCH_FAMILY)
131 /* "Fully Coherent." */
132 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
133 #error CleanPoU_Dcache needs an implementation
134 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
139 * void CleanPoU_DcacheRegion(vm_offset_t va, unsigned length)
141 * Clean d-cache region to Point of Unification
145 .globl EXT(CleanPoU_DcacheRegion)
146 LEXT(CleanPoU_DcacheRegion)
147 #if defined(APPLE_ARM64_ARCH_FAMILY)
148 /* "Fully Coherent." */
149 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
150 mov x9, #((1<<MMU_CLINE)-1)
152 bic x0, x0, x9 // Cached aligned
155 lsr x1, x1, #MMU_CLINE // Set cache line counter
157 dc cvau, x0 // Clean dcache line to PoU
158 add x0, x0, #(1<<MMU_CLINE) // Get next cache aligned addr
159 subs x1, x1, #1 // Decrementer cache line counter
160 b.pl L_cpudr_loop // Loop in counter not null
161 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
166 * void CleanPoC_DcacheRegion_internal(vm_offset_t va, unsigned length)
168 * Clean d-cache region to Point of Coherency
172 LEXT(CleanPoC_DcacheRegion_internal)
174 mov x9, #((1<<MMU_CLINE)-1)
176 bic x0, x0, x9 // Cached aligned
179 lsr x1, x1, #MMU_CLINE // Set cache line counter
182 #if defined(APPLE_ARM64_ARCH_FAMILY)
183 // It may be tempting to clean the cache (dc cvac),
184 // but see Cyclone UM 5.3.8.3 -- it's always a NOP on Cyclone.
186 // Clean & Invalidate, however, will work as long as HID4.DisDCMvaOps isn't set.
187 dc civac, x0 // Clean & Invalidate dcache line to PoC
189 dc cvac, x0 // Clean dcache line to PoC
191 add x0, x0, #(1<<MMU_CLINE) // Get next cache aligned addr
192 subs x1, x1, #1 // Decrementer cache line counter
193 b.pl L_cpcdr_loop // Loop in counter not null
199 * void CleanPoC_DcacheRegion(vm_offset_t va, unsigned length)
201 * Clean d-cache region to Point of Coherency
205 .globl EXT(CleanPoC_DcacheRegion)
206 LEXT(CleanPoC_DcacheRegion)
207 #if defined(APPLE_ARM64_ARCH_FAMILY)
208 /* "Fully Coherent." */
211 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
212 b EXT(CleanPoC_DcacheRegion_internal)
213 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
216 * void CleanPoC_DcacheRegion_Force(vm_offset_t va, unsigned length)
218 * Clean d-cache region to Point of Coherency - when you really
219 * need to flush even on coherent platforms, e.g. panic log
223 .globl EXT(CleanPoC_DcacheRegion_Force)
224 LEXT(CleanPoC_DcacheRegion_Force)
225 #if defined(APPLE_ARM64_ARCH_FAMILY)
227 stp x0, x1, [sp, #-16]!
228 bl EXT(_disable_preemption)
231 ARM64_READ_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
232 and x14, x14, (~ARM64_REG_HID4_DisDcMVAOps)
233 ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
235 ldp x0, x1, [sp], #16
236 bl EXT(CleanPoC_DcacheRegion_internal)
238 orr x14, x14, ARM64_REG_HID4_DisDcMVAOps
239 ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
241 bl EXT(_enable_preemption)
245 b EXT(CleanPoC_DcacheRegion_internal)
246 #endif // APPLE_ARM64_ARCH_FAMILY
249 * void FlushPoC_Dcache(void)
251 * Clean and Invalidate dcaches to Point of Coherency
255 .globl EXT(FlushPoC_Dcache)
256 LEXT(FlushPoC_Dcache)
257 #if defined(APPLE_ARM64_ARCH_FAMILY)
258 /* "Fully Coherent." */
259 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
261 mov x9, #(1 << MMU_I7SET)
262 mov x10, #(1 << (MMU_NSET + MMU_I7SET))
263 mov x11, #(1 << MMU_I7WAY)
266 dc cisw, x0 // clean invalidate dcache line by way/set
267 add x0, x0, x9 // increment set index
268 tst x0, #(1 << (MMU_NSET + MMU_I7SET)) // look for overflow
269 b.eq L_fpcd_dcacheline
270 bic x0, x0, x10 // clear set overflow
271 adds x0, x0, x11 // increment way
272 b.cc L_fpcd_dcacheway // loop
275 mov x9, #(1 << L2_I7SET)
276 mov x10, #(1 << (L2_NSET + L2_I7SET))
277 mov x11, #(1 << L2_I7WAY)
280 dc cisw, x0 // clean invalide dcache line by way/set
281 add x0, x0, x9 // increment set index
282 tst x0, #(1 << (L2_NSET + L2_I7SET)) // look for overflow
283 b.eq L_fpcd_l2dcacheline
284 bic x0, x0, x10 // clear set overflow
285 adds x0, x0, x11 // increment way
286 b.cc L_fpcd_l2dcacheway // loop
288 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
293 * void FlushPoU_Dcache(void)
295 * Flush D-cache to Point of Unification
299 .globl EXT(FlushPoU_Dcache)
300 LEXT(FlushPoU_Dcache)
301 #if defined(APPLE_ARM64_ARCH_FAMILY)
302 /* "Fully Coherent." */
303 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
305 mov x9, #(1 << MMU_I7SET)
306 mov x10, #(1 << (MMU_NSET + MMU_I7SET))
307 mov x11, #(1 << MMU_I7WAY)
310 dc cisw, x0 // clean invalidate dcache line by way/set
311 add x0, x0, x9 // increment set index
312 tst x0, #1 << (MMU_NSET + MMU_I7SET) // look for overflow
314 bic x0, x0, x10 // clear set overflow
315 adds x0, x0, x11 // increment way
316 b.cc L_fpud_way // loop
317 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
322 * void FlushPoC_DcacheRegion(vm_offset_t va, unsigned length)
324 * Clean and Invalidate d-cache region to Point of Coherency
328 .globl EXT(FlushPoC_DcacheRegion)
329 LEXT(FlushPoC_DcacheRegion)
330 #if defined(APPLE_ARM64_ARCH_FAMILY)
331 /* "Fully Coherent." */
332 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
333 mov x9, #((1<<MMU_CLINE)-1)
335 bic x0, x0, x9 // Cached aligned
338 lsr x1, x1, #MMU_CLINE // Set cache line counter
340 dc civac, x0 // Clean invalidate dcache line to PoC
341 add x0, x0, #(1<<MMU_CLINE) // Get next cache aligned addr
342 subs x1, x1, #1 // Decrementer cache line counter
343 b.pl L_fpcdr_loop // Loop in counter not null
344 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
349 * void flush_dcache64(addr64_t addr, unsigned length, boolean_t phys)
353 .globl EXT(flush_dcache64)
355 BRANCH_EXTERN flush_dcache
358 * void clean_dcache64(addr64_t addr, unsigned length, boolean_t phys)
362 .globl EXT(clean_dcache64)
364 BRANCH_EXTERN clean_dcache
367 * void invalidate_icache(vm_offset_t va, unsigned length, boolean_t phys)
368 * void invalidate_icache64(addr64_t va, unsigned length, boolean_t phys)
372 .globl EXT(invalidate_icache64)
373 .globl EXT(invalidate_icache)
374 LEXT(invalidate_icache64)
375 LEXT(invalidate_icache)
376 cmp w2, #0 // Is it physical?
377 b.eq Lcall_invalidate_worker
378 adrp x2, _gPhysBase@page
379 add x2, x2, _gPhysBase@pageoff
382 adrp x2, _gVirtBase@page
383 add x2, x2, _gVirtBase@pageoff
386 Lcall_invalidate_worker:
387 b EXT(InvalidatePoU_IcacheRegion)