2 * Copyright (c) 2010-2013 Apple Inc. All rights reserved.
4 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. The rights granted to you under the License
10 * may not be used to create, or enable the creation or redistribution of,
11 * unlawful or unlicensed copies of an Apple operating system, or to
12 * circumvent, violate, or enable the circumvention or violation of, any
13 * terms of an Apple operating system software license agreement.
15 * Please obtain a copy of the License at
16 * http://www.opensource.apple.com/apsl/ and read it before using this file.
18 * The Original Code and all software distributed under the License are
19 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
20 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
21 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
22 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
23 * Please see the License for the specific language governing rights and
24 * limitations under the License.
26 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
29 #include <machine/asm.h>
30 #include <arm64/proc_reg.h>
32 #include <sys/errno.h>
36 * void InvalidatePoU_Icache(void)
// Invalidate the entire instruction cache to the Point of Unification,
// Inner Shareable domain (ic ialluis). Two exported names alias the same
// entry point: the primary name and the legacy invalidate_mmu_icache alias.
// NOTE(review): the trailing barrier(s) and ret for this routine are not
// visible in this view — confirm a dsb/isb + ret follow in the full file.
42 .globl EXT(InvalidatePoU_Icache)
43 .globl EXT(invalidate_mmu_icache)
44 LEXT(InvalidatePoU_Icache)
45 LEXT(invalidate_mmu_icache)
47 ic ialluis // Invalidate entire icache to PoU, Inner Shareable
54 * void InvalidatePoU_IcacheRegion(vm_offset_t va, unsigned length)
56 * Invalidate icache region
// x0 = virtual start address, x1 = length in bytes.
// First cleans the matching D-cache region to the PoU so subsequent I-cache
// refills observe up-to-date instructions, then invalidates the I-cache.
60 .globl EXT(InvalidatePoU_IcacheRegion)
61 LEXT(InvalidatePoU_IcacheRegion)
// NOTE(review): lr must be saved/restored around the bl calls below; the
// prologue/epilogue is not visible in this view — confirm in the full file.
64 bl EXT(CleanPoU_DcacheRegion) // Clean D-cache region to PoU first
65 #if __ARM_IC_NOALIAS_ICACHE__
// Non-aliasing I-cache: invalidate the region line by line, by VA.
66 mov x9, #((1<<MMU_I_CLINE)-1) // x9 = icache line offset mask
68 bic x0, x0, x9 // Align start address down to icache line
71 lsr x1, x1, #MMU_I_CLINE // Convert byte length to a line counter
// NOTE(review): the L_ipui_loop label targeted below is not visible here.
73 ic ivau, x0 // Invalidate icache line by VA to PoU
74 add x0, x0, #1<<MMU_I_CLINE // Advance to next icache line
75 subs x1, x1, #1 // Decrement cache line counter
76 b.pl L_ipui_loop // Loop while counter is non-negative
// Aliasing I-cache implementation: fall back to a full I-cache invalidate.
81 bl EXT(InvalidatePoU_Icache)
87 * Obtains cache physical layout information required for way/set
88 * data cache maintenance operations.
90 * $0: Data cache level, starting from 0
91 * $1: Output register for set increment
92 * $2: Output register for last valid set
93 * $3: Output register for way increment
95 .macro GET_CACHE_CONFIG
97 msr CSSELR_EL1, $0 // Select the cache to query via CSSELR_EL1
98 isb // Synchronize context so CCSIDR_EL1 reflects the new selection
// NOTE(review): a read of CCSIDR_EL1 into $0 is implied by the field
// extracts below but is not visible in this view — confirm in full file.
101 ubfx $1, $0, #3, #10 // CCSIDR bits [12:3]: extract number of ways - 1
103 add $1, $1, #1 // calculate number of ways
// NOTE(review): lines deriving the way increment from the way count (and
// writing $3) are not visible here; #32 below appears to be a shift base.
112 mov $1, #32 // calculate way increment
118 ubfx $1, $0, #0, #3 // CCSIDR bits [2:0]: extract log2(line size) - 4
119 add $1, $1, #4 // calculate log2(line size)
121 lsl $1, $2, $1 // set increment = $2 << log2(line size)
123 ubfx $2, $0, #13, #15 // CCSIDR bits [27:13]: extract number of sets - 1
124 add $2, $2, #1 // calculate number of sets
125 mul $2, $1, $2 // last valid set = set increment * number of sets
129 * Detects the presence of an L2 cache and returns 1 if implemented,
132 * $0: Output register
// NOTE(review): the enclosing .macro directive and the system-register read
// feeding $0 (presumably CLIDR_EL1) are not visible in this view.
136 ubfx $0, $0, #3, #3 // extract bits [5:3]: L2 cache Ctype (CLIDR Ctype2)
142 * void CleanPoC_Dcache(void)
// Clean the data cache(s) to the Point of Coherency using set/way ops.
// On Apple cores the hierarchy is fully coherent, so no maintenance needed.
148 .globl EXT(CleanPoC_Dcache)
149 .globl EXT(clean_mmu_dcache)
150 LEXT(CleanPoC_Dcache)
151 #if defined(APPLE_ARM64_ARCH_FAMILY)
152 /* "Fully Coherent." */
153 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
// L1 D-cache: walk every set/way and clean each line with dc csw.
// NOTE(review): the initialization of x0 and the loop labels referenced
// below (L_cpcd_dcacheway / L_cpcd_dcacheline) are not visible in this view.
155 GET_CACHE_CONFIG x0, x9, x10, x11 // x9 = set incr, x10 = last set, x11 = way incr
161 dc csw, x0 // clean dcache line by way/set
162 add x0, x0, x9 // increment set index
163 tst x0, x10 // look for set-index overflow
164 b.eq L_cpcd_dcacheline // no overflow: next line in this way
165 bic x0, x0, x10 // clear set overflow
166 adds w0, w0, w11 // increment way; carry set when ways wrap
167 b.cc L_cpcd_dcacheway // loop until way counter wraps (carry)
// If an L2 cache is implemented, repeat the set/way clean for L2.
170 cbz x0, L_cpcd_skipl2dcache // skip when no L2 present
172 GET_CACHE_CONFIG x0, x9, x10, x11 // reload geometry for the L2 cache
178 dc csw, x0 // clean dcache line by way/set
179 add x0, x0, x9 // increment set index
180 tst x0, x10 // look for set-index overflow
181 b.eq L_cpcd_l2dcacheline // no overflow: next line in this way
182 bic x0, x0, x10 // clear set overflow
183 adds w0, w0, w11 // increment way; carry set when ways wrap
184 b.cc L_cpcd_l2dcacheway // loop until way counter wraps
186 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
191 * void CleanPoU_Dcache(void)
193 * Clean D-cache to Point of Unification
// Set/way clean of the L1 D-cache; a no-op on fully coherent Apple cores.
197 .globl EXT(CleanPoU_Dcache)
198 LEXT(CleanPoU_Dcache)
199 #if defined(APPLE_ARM64_ARCH_FAMILY)
200 /* "Fully Coherent." */
201 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
// NOTE(review): x0 setup and the L_cpud_* loop labels referenced below are
// not visible in this view — confirm in the full file.
203 GET_CACHE_CONFIG x0, x9, x10, x11 // x9 = set incr, x10 = last set, x11 = way incr
209 dc csw, x0 // clean dcache line by way/set
210 add x0, x0, x9 // increment set index
211 tst x0, x10 // look for set-index overflow
212 b.eq L_cpud_dcacheline // no overflow: next line in this way
213 bic x0, x0, x10 // clear set overflow
214 adds w0, w0, w11 // increment way; carry set when ways wrap
215 b.cc L_cpud_dcacheway // loop until way counter wraps
216 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
221 * void CleanPoU_DcacheRegion(vm_offset_t va, unsigned length)
223 * Clean d-cache region to Point of Unification
// x0 = virtual start address, x1 = length in bytes. Cleans by VA with
// dc cvau. Works in scratch regs x3/x4/x9 so x0/x1 survive for callers
// (InvalidatePoU_IcacheRegion relies on this).
227 .globl EXT(CleanPoU_DcacheRegion)
228 LEXT(CleanPoU_DcacheRegion)
229 #if defined(APPLE_ARM64_ARCH_FAMILY)
230 /* "Fully Coherent." */
231 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
232 mov x9, #((1<<MMU_CLINE)-1) // x9 = dcache line offset mask
234 bic x3, x0, x9 // x3 = line-aligned start (x0 preserved)
// NOTE(review): the computation loading x4 with the adjusted length, and
// the L_cpudr_loop label, are not visible in this view.
237 lsr x4, x4, #MMU_CLINE // Convert byte length to a line counter
240 dc cvau, x3 // Clean dcache line to PoU
241 add x3, x3, #(1<<MMU_CLINE) // Advance to next cache line
242 subs x4, x4, #1 // Decrement cache line counter
243 b.pl L_cpudr_loop // Loop while counter is non-negative
244 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
249 * void CleanPoC_DcacheRegion_internal(vm_offset_t va, size_t length)
251 * Clean d-cache region to Point of Coherency
// x0 = virtual start address, x1 = length in bytes. Worker shared by the
// public CleanPoC_DcacheRegion* entry points.
255 LEXT(CleanPoC_DcacheRegion_internal)
256 mov x9, #((1<<MMU_CLINE)-1) // x9 = dcache line offset mask
258 bic x0, x0, x9 // Align start address down to cache line
261 lsr x1, x1, #MMU_CLINE // Convert byte length to a line counter
// NOTE(review): the L_cpcdr_loop label and the #else/#endif pairing the
// #if below (separating civac from cvac) are not visible in this view.
264 #if defined(APPLE_ARM64_ARCH_FAMILY)
265 // It may be tempting to clean the cache (dc cvac),
266 // but see Cyclone UM 5.3.8.3 -- it's always a NOP on Cyclone.
268 // Clean & Invalidate, however, will work as long as HID4.DisDCMvaOps isn't set.
269 dc civac, x0 // Clean & Invalidate dcache line to PoC
271 dc cvac, x0 // Clean dcache line to PoC (non-Apple path)
273 add x0, x0, #(1<<MMU_CLINE) // Advance to next cache line
274 subs x1, x1, #1 // Decrement cache line counter
275 b.pl L_cpcdr_loop // Loop while counter is non-negative
280 * void CleanPoC_DcacheRegion(vm_offset_t va, size_t length)
282 * Clean d-cache region to Point of Coherency
// On Apple cores (coherent) this is a no-op; otherwise tail-branch to the
// shared worker with x0/x1 passed through unchanged.
286 .globl EXT(CleanPoC_DcacheRegion)
287 LEXT(CleanPoC_DcacheRegion)
288 #if defined(APPLE_ARM64_ARCH_FAMILY)
289 /* "Fully Coherent." */
292 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
293 b EXT(CleanPoC_DcacheRegion_internal) // tail call; worker returns to caller
294 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
// Force a clean to PoC even on Apple cores: temporarily clear
// HID4.DisDcMVAOps so the by-VA dc ops take effect, run the worker, then
// restore the bit. Caller must have preemption disabled.
298 .globl EXT(CleanPoC_DcacheRegion_Force_nopreempt)
299 LEXT(CleanPoC_DcacheRegion_Force_nopreempt)
300 #if defined(APPLE_ARM64_ARCH_FAMILY)
// NOTE(review): prologue saving lr (and any isb around the HID writes) is
// not visible in this view — confirm in the full file.
305 ARM64_READ_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4 // x14 = current (E)HID4
306 and x14, x14, (~ARM64_REG_HID4_DisDcMVAOps) // clear DisDcMVAOps
307 ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
309 bl EXT(CleanPoC_DcacheRegion_internal) // NOTE(review): relies on worker not clobbering x14/x15
311 orr x14, x14, ARM64_REG_HID4_DisDcMVAOps // set DisDcMVAOps again
312 ARM64_WRITE_EP_SPR x15, x14, ARM64_REG_EHID4, ARM64_REG_HID4
// Non-Apple cores: no HID fiddling needed; just run the worker.
317 b EXT(CleanPoC_DcacheRegion_internal)
318 #endif // APPLE_ARM64_ARCH_FAMILY
321 * void CleanPoC_DcacheRegion_Force(vm_offset_t va, size_t length)
323 * Clean d-cache region to Point of Coherency - when you really
324 * need to flush even on coherent platforms, e.g. panic log
// Wraps the _nopreempt variant in a disable/enable-preemption pair,
// preserving x0/x1 (va/length) across the _disable_preemption call.
328 .globl EXT(CleanPoC_DcacheRegion_Force)
329 LEXT(CleanPoC_DcacheRegion_Force)
330 #if defined(APPLE_ARM64_ARCH_FAMILY)
// NOTE(review): the frame setup saving fp/lr before these bl calls is not
// visible in this view — confirm in the full file.
333 stp x0, x1, [sp, #-16]! // save va/length across C call
334 bl EXT(_disable_preemption)
335 ldp x0, x1, [sp], #16 // restore va/length
336 bl EXT(CleanPoC_DcacheRegion_Force_nopreempt)
337 bl EXT(_enable_preemption)
// Non-Apple cores: coherency games unnecessary; plain worker suffices.
341 b EXT(CleanPoC_DcacheRegion_internal)
342 #endif // APPLE_ARM64_ARCH_FAMILY
345 * void FlushPoC_Dcache(void)
347 * Clean and Invalidate dcaches to Point of Coherency
// Set/way clean+invalidate (dc cisw) of L1 and, when present, L2.
// No-op on fully coherent Apple cores.
351 .globl EXT(FlushPoC_Dcache)
352 LEXT(FlushPoC_Dcache)
353 #if defined(APPLE_ARM64_ARCH_FAMILY)
354 /* "Fully Coherent." */
355 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
// NOTE(review): x0 setup and the L_fpcd_* loop labels referenced below are
// not visible in this view.
357 GET_CACHE_CONFIG x0, x9, x10, x11 // x9 = set incr, x10 = last set, x11 = way incr
363 dc cisw, x0 // clean/invalidate dcache line by way/set
364 add x0, x0, x9 // increment set index
365 tst x0, x10 // look for set-index overflow
366 b.eq L_fpcd_dcacheline // no overflow: next line in this way
367 bic x0, x0, x10 // clear set overflow
368 adds w0, w0, w11 // increment way; carry set when ways wrap
369 b.cc L_fpcd_dcacheway // loop until way counter wraps
// If an L2 cache is implemented, repeat for L2.
372 cbz x0, L_fpcd_skipl2dcache // skip when no L2 present
375 GET_CACHE_CONFIG x0, x9, x10, x11 // reload geometry for the L2 cache
380 dc cisw, x0 // clean/invalidate dcache line by way/set
381 add x0, x0, x9 // increment set index
382 tst x0, x10 // look for set-index overflow
383 b.eq L_fpcd_l2dcacheline // no overflow: next line in this way
384 bic x0, x0, x10 // clear set overflow
385 adds w0, w0, w11 // increment way; carry set when ways wrap
386 b.cc L_fpcd_l2dcacheway // loop until way counter wraps
388 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
393 * void FlushPoU_Dcache(void)
395 * Flush D-cache to Point of Unification
// Set/way clean+invalidate of the L1 D-cache; a no-op on Apple cores.
399 .globl EXT(FlushPoU_Dcache)
400 LEXT(FlushPoU_Dcache)
401 #if defined(APPLE_ARM64_ARCH_FAMILY)
402 /* "Fully Coherent." */
403 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
// NOTE(review): x0 setup, the inner-loop branch after the tst, and the
// L_fpud_* labels are not visible in this view — confirm in the full file.
405 GET_CACHE_CONFIG x0, x9, x10, x11 // x9 = set incr, x10 = last set, x11 = way incr
411 dc cisw, x0 // clean/invalidate dcache line by way/set
412 add x0, x0, x9 // increment set index
413 tst x0, x10 // look for set-index overflow
415 bic x0, x0, x10 // clear set overflow
416 adds w0, w0, w11 // increment way; carry set when ways wrap
417 b.cc L_fpud_way // loop until way counter wraps
418 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
423 * void FlushPoC_DcacheRegion(vm_offset_t va, unsigned length)
425 * Clean and Invalidate d-cache region to Point of Coherency
// x0 = virtual start address, x1 = length in bytes. Clean+invalidate by VA
// (dc civac), line by line. No-op on fully coherent Apple cores.
429 .globl EXT(FlushPoC_DcacheRegion)
430 LEXT(FlushPoC_DcacheRegion)
431 #if defined(APPLE_ARM64_ARCH_FAMILY)
432 /* "Fully Coherent." */
433 #else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
434 mov x9, #((1<<MMU_CLINE)-1) // x9 = dcache line offset mask
436 bic x0, x0, x9 // Align start address down to cache line
// NOTE(review): the L_fpcdr_loop label is not visible in this view.
439 lsr x1, x1, #MMU_CLINE // Convert byte length to a line counter
442 dc civac, x0 // Clean & invalidate dcache line to PoC
443 add x0, x0, #(1<<MMU_CLINE) // Advance to next cache line
444 subs x1, x1, #1 // Decrement cache line counter
445 b.pl L_fpcdr_loop // Loop while counter is non-negative
446 #endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
451 * void flush_dcache64(addr64_t addr, unsigned length, boolean_t phys)
// 64-bit wrapper: tail-branch to the flush_dcache worker, args unchanged.
// NOTE(review): the LEXT(flush_dcache64) label line is not visible here.
455 .globl EXT(flush_dcache64)
457 BRANCH_EXTERN flush_dcache
460 * void clean_dcache64(addr64_t addr, unsigned length, boolean_t phys)
// 64-bit wrapper: tail-branch to the clean_dcache worker, args unchanged.
// NOTE(review): the LEXT(clean_dcache64) label line is not visible here.
464 .globl EXT(clean_dcache64)
466 BRANCH_EXTERN clean_dcache
469 * void invalidate_icache(vm_offset_t va, unsigned length, boolean_t phys)
470 * void invalidate_icache64(addr64_t va, unsigned length, boolean_t phys)
// If w2 (phys) is non-zero, translate the physical address to the kernel's
// virtual mapping before invalidating: va = pa - gPhysBase + gVirtBase.
474 .globl EXT(invalidate_icache64)
475 .globl EXT(invalidate_icache)
476 LEXT(invalidate_icache64)
477 LEXT(invalidate_icache)
478 cmp w2, #0 // Is the address physical?
479 b.eq Lcall_invalidate_worker // virtual: invalidate as-is
480 adrp x2, _gPhysBase@page // x2 = &gPhysBase
481 add x2, x2, _gPhysBase@pageoff
// NOTE(review): the loads of gPhysBase/gVirtBase values and the subtract/
// add computing the translated VA are not visible in this view.
484 adrp x2, _gVirtBase@page // x2 = &gVirtBase
485 add x2, x2, _gVirtBase@pageoff
488 Lcall_invalidate_worker:
489 b EXT(InvalidatePoU_IcacheRegion) // tail call; x0 = va, x1 = length