2 * Copyright (c) 2010 Apple Inc. All rights reserved.
4 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. The rights granted to you under the License
10 * may not be used to create, or enable the creation or redistribution of,
11 * unlawful or unlicensed copies of an Apple operating system, or to
12 * circumvent, violate, or enable the circumvention or violation of, any
13 * terms of an Apple operating system software license agreement.
15 * Please obtain a copy of the License at
16 * http://www.opensource.apple.com/apsl/ and read it before using this file.
18 * The Original Code and all software distributed under the License are
19 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
20 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
21 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
22 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
23 * Please see the License for the specific language governing rights and
24 * limitations under the License.
26 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
29 #include <machine/asm.h>
30 #include <arm/proc_reg.h>
32 #include <sys/errno.h>
37 * void invalidate_mmu_cache(void)
39 * Invalidate d-cache and i-cache
// Invalidates both the I-cache and D-cache with a single CP15 op
// (c7, c7, 0 = "invalidate both caches"; the value in r0 is ignored).
// NOTE(review): the return instruction (bx lr) is not visible in this
// excerpt — confirm it follows in the full file.
43 .globl EXT(invalidate_mmu_cache)
44 LEXT(invalidate_mmu_cache)
46 mcr p15, 0, r0, c7, c7, 0 // Invalidate I-cache and D-cache (r0 value ignored)
50 * void invalidate_mmu_dcache(void)
// Invalidates the entire D-cache without cleaning it first
// (c7, c6, 0 = "invalidate entire data cache"); dirty lines are discarded.
// NOTE(review): the return instruction is not visible in this excerpt.
56 .globl EXT(invalidate_mmu_dcache)
57 LEXT(invalidate_mmu_dcache)
59 mcr p15, 0, r0, c7, c6, 0 // Invalidate entire dcache (r0 value ignored)
63 * void invalidate_mmu_dcache_region(vm_offset_t va, unsigned length)
65 * Invalidate d-cache region
// In:  r0 = virtual address, r1 = length in bytes.
// Iterates cache-line by cache-line over [va, va+length).
// NOTE(review): the fmdr_loop: label targeted by the bpl below, and the
// line(s) that fold the intra-line offset (r2) into the length, are elided
// from this excerpt — verify against the full file.
69 .globl EXT(invalidate_mmu_dcache_region)
70 LEXT(invalidate_mmu_dcache_region)
71 and r2, r0, #((1<<MMU_CLINE)-1) // r2 = va's offset within its cache line
72 bic r0, r0, #((1<<MMU_CLINE)-1) // Cache-line align the start address
75 mov r1, r1, LSR #MMU_CLINE // r1 = cache line count
77 mcr p15, 0, r0, c7, c14, 1 // Clean & invalidate dcache line by MVA (c7,c14,1 cleans before invalidating, preserving unrelated data sharing the line)
78 add r0, r0, #1<<MMU_CLINE // Advance to next cache-line-aligned addr
79 subs r1, r1, #1 // Decrement cache line counter
80 bpl fmdr_loop // Loop while counter >= 0
85 * void InvalidatePoU_Icache(void)
// Invalidates the entire I-cache to the Point of Unification
// (c7, c5, 0 = "invalidate entire instruction cache").
// invalidate_mmu_icache is an alias entry point for the same code.
// NOTE(review): the return instruction is not visible in this excerpt.
91 .globl EXT(InvalidatePoU_Icache)
92 .globl EXT(invalidate_mmu_icache)
93 LEXT(InvalidatePoU_Icache)
94 LEXT(invalidate_mmu_icache)
96 mcr p15, 0, r0, c7, c5, 0 // Invalidate entire icache (r0 value ignored)
100 * void InvalidatePoU_IcacheRegion(vm_offset_t va, unsigned length)
102 * Invalidate icache region
// In:  r0 = virtual address, r1 = length in bytes.
// Invalidates the I-cache line by line over [va, va+length).
// NOTE(review): the fmir_loop: label targeted by the bpl below, and the
// line(s) folding the intra-line offset (r2) into the length, are elided
// from this excerpt — verify against the full file.
106 .globl EXT(InvalidatePoU_IcacheRegion)
107 LEXT(InvalidatePoU_IcacheRegion)
108 and r2, r0, #((1<<MMU_I_CLINE)-1) // r2 = va's offset within its icache line
109 bic r0, r0, #((1<<MMU_I_CLINE)-1) // Cache-line align the start address
112 mov r1, r1, LSR #MMU_I_CLINE // r1 = icache line count
114 mcr p15, 0, r0, c7, c5, 1 // Invalidate icache line by MVA
115 add r0, r0, #1<<MMU_I_CLINE // Advance to next cache-line-aligned addr
116 subs r1, r1, #1 // Decrement cache line counter
117 bpl fmir_loop // Loop while counter >= 0
121 * void CleanPoC_Dcache(void)
// Cleans the entire D-cache (L1, then L2 when present) to the Point of
// Coherency by walking every set/way with c7, c10, 2 (clean by set/way).
// r0 encodes the set/way index in I7 format. Skipped entirely when the
// L1 cache is write-through (__ARM_L1_WT_CACHE__): nothing to clean.
// clean_mmu_dcache is an alias entry point.
// NOTE(review): the r0 initialization, the clean_dcacheway/clean_dcacheline
// labels, the beq back-branch for the L1 loop, the #else/#endif lines, the
// L2 guards/labels, and the final return are elided from this excerpt.
127 .globl EXT(CleanPoC_Dcache)
128 .globl EXT(clean_mmu_dcache)
129 LEXT(CleanPoC_Dcache)
130 LEXT(clean_mmu_dcache)
131 #if !defined(__ARM_L1_WT_CACHE__)
135 mcr p15, 0, r0, c7, c10, 2 // clean dcache line by way/set
136 add r0, r0, #1 << MMU_I7SET // increment set index
137 tst r0, #1 << (MMU_NSET + MMU_I7SET) // look for set-index overflow
139 bic r0, r0, #1 << (MMU_NSET + MMU_I7SET) // clear set overflow
140 adds r0, r0, #1 << MMU_I7WAY // increment way (carry set when all ways done)
141 bcc clean_dcacheway // loop until way counter wraps
148 mcr p15, 0, r0, c7, c10, 2 // clean L2 dcache line by way/set
149 add r0, r0, #1 << L2_I7SET // increment set index
150 tst r0, #1 << (L2_NSET + L2_I7SET) // look for set-index overflow
151 beq clean_l2dcacheline // next set in same way
152 bic r0, r0, #1 << (L2_NSET + L2_I7SET) // clear set overflow
153 adds r0, r0, #1 << L2_I7WAY // increment way (carry set when all ways done)
154 bcc clean_l2dcacheway // loop until way counter wraps
160 * void CleanPoU_Dcache(void)
162 * Clean D-cache to Point of Unification
// Walks every L1 set/way with c7, c10, 2 (clean dcache by set/way);
// r0 encodes the set/way index in I7 format. Compiled out when the
// cache is write-through to the PoU (__ARM_PoU_WT_CACHE__).
// NOTE(review): the r0 initialization before the loop, the #else/#endif
// lines, and the final return are elided from this excerpt.
166 .globl EXT(CleanPoU_Dcache)
167 LEXT(CleanPoU_Dcache)
168 #if !defined(__ARM_PoU_WT_CACHE__)
170 clean_dcacheway_idle:
171 clean_dcacheline_idle:
172 mcr p15, 0, r0, c7, c10, 2 // clean dcache line by way/set
173 add r0, r0, #1 << MMU_I7SET // increment set index
174 tst r0, #1 << (MMU_NSET + MMU_I7SET) // look for set-index overflow
175 beq clean_dcacheline_idle // next set in same way
176 bic r0, r0, #1 << (MMU_NSET + MMU_I7SET) // clear set overflow
177 adds r0, r0, #1 << MMU_I7WAY // increment way (carry set when all ways done)
178 bcc clean_dcacheway_idle // loop until way counter wraps
184 * void CleanPoU_DcacheRegion(vm_offset_t va, unsigned length)
186 * Clean d-cache region to Point of Unification
// In:  r0 = virtual address, r1 = length in bytes.
// Cleans line by line with c7, c11, 1 (clean dcache line by MVA to PoU).
// Compiled out when the cache is write-through to the PoU.
// NOTE(review): the cudr_loop: label targeted by the bpl below, the
// length-adjust line(s) using r2, and the #endif/return are elided from
// this excerpt — verify against the full file.
190 .globl EXT(CleanPoU_DcacheRegion)
191 LEXT(CleanPoU_DcacheRegion)
192 #if !defined(__ARM_PoU_WT_CACHE__)
194 and r2, r0, #((1<<MMU_CLINE)-1) // r2 = va's offset within its cache line
195 bic r0, r0, #((1<<MMU_CLINE)-1) // Cache-line align the start address
198 mov r1, r1, LSR #MMU_CLINE // r1 = cache line count
200 mcr p15, 0, r0, c7, c11, 1 // Clean dcache line by MVA to PoU
201 add r0, r0, #1<<MMU_CLINE // Advance to next cache-line-aligned addr
202 subs r1, r1, #1 // Decrement cache line counter
203 bpl cudr_loop // Loop while counter >= 0
210 * void CleanPoC_DcacheRegion(vm_offset_t va, unsigned length)
212 * Clean d-cache region to Point of Coherency
// In:  r0 = virtual address, r1 = length in bytes.
// Cleans line by line with c7, c10, 1 (clean dcache line by MVA to PoC).
// The _Force variant shares this entry point on this configuration.
// NOTE(review): the ccdr_loop: label targeted by the bpl below, the
// length-adjust line(s) using r2, and the return are elided from this
// excerpt — verify against the full file.
216 .globl EXT(CleanPoC_DcacheRegion)
217 .globl EXT(CleanPoC_DcacheRegion_Force)
218 LEXT(CleanPoC_DcacheRegion)
219 LEXT(CleanPoC_DcacheRegion_Force)
220 and r2, r0, #((1<<MMU_CLINE)-1) // r2 = va's offset within its cache line
221 bic r0, r0, #((1<<MMU_CLINE)-1) // Cache-line align the start address
224 mov r1, r1, LSR #MMU_CLINE // r1 = cache line count
226 mcr p15, 0, r0, c7, c10, 1 // Clean dcache line by MVA to PoC
227 add r0, r0, #1<<MMU_CLINE // Advance to next cache-line-aligned addr
228 subs r1, r1, #1 // Decrement cache line counter
229 bpl ccdr_loop // Loop while counter >= 0
234 * void FlushPoC_Dcache(void)
236 * Clean and Invalidate dcaches to Point of Coherency
// Walks every set/way of L1 (and L2 when present) with c7, c14, 2
// (clean & invalidate dcache by set/way); r0 encodes the set/way index
// in I7 format.
// NOTE(review): the r0 initialization before each loop, the beq
// back-branch of the L1 loop at original line 248's neighborhood, any
// #if guards around the L2 section, and the final return are partially
// elided from this excerpt — verify against the full file.
240 .globl EXT(FlushPoC_Dcache)
241 LEXT(FlushPoC_Dcache)
243 cleanflush_dcacheway:
244 cleanflush_dcacheline:
245 mcr p15, 0, r0, c7, c14, 2 // clean & invalidate dcache line by way/set
246 add r0, r0, #1 << MMU_I7SET // increment set index
247 tst r0, #1 << (MMU_NSET + MMU_I7SET) // look for set-index overflow
248 beq cleanflush_dcacheline // next set in same way
249 bic r0, r0, #1 << (MMU_NSET + MMU_I7SET) // clear set overflow
250 adds r0, r0, #1 << MMU_I7WAY // increment way (carry set when all ways done)
251 bcc cleanflush_dcacheway // loop until way counter wraps
255 cleanflush_l2dcacheway:
256 cleanflush_l2dcacheline:
257 mcr p15, 0, r0, c7, c14, 2 // clean & invalidate L2 dcache line by way/set
258 add r0, r0, #1 << L2_I7SET // increment set index
259 tst r0, #1 << (L2_NSET + L2_I7SET) // look for set-index overflow
260 beq cleanflush_l2dcacheline // next set in same way
261 bic r0, r0, #1 << (L2_NSET + L2_I7SET) // clear set overflow
262 adds r0, r0, #1 << L2_I7WAY // increment way (carry set when all ways done)
263 bcc cleanflush_l2dcacheway // loop until way counter wraps
269 * void FlushPoU_Dcache(void)
271 * Flush D-cache to Point of Unification
// Walks every L1 set/way with c7, c14, 2 (clean & invalidate dcache by
// set/way); r0 encodes the set/way index in I7 format.
// NOTE(review): the r0 initialization, the loop labels, the beq
// back-branch after the tst, the bcc that closes the way loop, and the
// return are elided from this excerpt — verify against the full file.
275 .globl EXT(FlushPoU_Dcache)
276 LEXT(FlushPoU_Dcache)
280 mcr p15, 0, r0, c7, c14, 2 // clean & invalidate dcache line by way/set
281 add r0, r0, #1 << MMU_I7SET // increment set index
282 tst r0, #1 << (MMU_NSET + MMU_I7SET) // look for set-index overflow
284 bic r0, r0, #1 << (MMU_NSET + MMU_I7SET) // clear set overflow
285 adds r0, r0, #1 << MMU_I7WAY // increment way (carry set when all ways done)
291 * void FlushPoC_DcacheRegion(vm_offset_t va, unsigned length)
293 * Clean and Invalidate d-cache region to Point of Coherency
// In:  r0 = virtual address, r1 = length in bytes.
// Cleans and invalidates line by line with c7, c14, 1
// (clean & invalidate dcache line by MVA to PoC).
// NOTE(review): the cfmdr_loop: label targeted by the bpl below, the
// length-adjust line(s) using r2, and the return are elided from this
// excerpt — verify against the full file.
297 .globl EXT(FlushPoC_DcacheRegion)
298 LEXT(FlushPoC_DcacheRegion)
299 and r2, r0, #((1<<MMU_CLINE)-1) // r2 = va's offset within its cache line
300 bic r0, r0, #((1<<MMU_CLINE)-1) // Cache-line align the start address
303 mov r1, r1, LSR #MMU_CLINE // r1 = cache line count
305 mcr p15, 0, r0, c7, c14, 1 // Clean & invalidate dcache line by MVA
306 add r0, r0, #1<<MMU_CLINE // Advance to next cache-line-aligned addr
307 subs r1, r1, #1 // Decrement cache line counter
308 bpl cfmdr_loop // Loop while counter >= 0
313 * void flush_dcache64(addr64_t addr, unsigned length, boolean_t phys)
// 64-bit-address wrapper: tail-transfers to flush_dcache via the
// LOAD_ADDR_PC macro (address materialized from the table emitted by
// LOAD_ADDR_GEN_DEF at the bottom of this file).
// NOTE(review): the LEXT entry label and the argument-marshalling
// instructions between .globl and this line are elided from this excerpt.
317 .globl EXT(flush_dcache64)
321 LOAD_ADDR_PC(flush_dcache)
324 * void clean_dcache64(addr64_t addr, unsigned length, boolean_t phys)
// 64-bit-address wrapper: tail-transfers to clean_dcache via the
// LOAD_ADDR_PC macro (address materialized from the table emitted by
// LOAD_ADDR_GEN_DEF at the bottom of this file).
// NOTE(review): the LEXT entry label and the argument-marshalling
// instructions between .globl and this line are elided from this excerpt.
328 .globl EXT(clean_dcache64)
332 LOAD_ADDR_PC(clean_dcache)
335 * void invalidate_icache(vm_offset_t va, unsigned length, boolean_t phys)
336 * void invalidate_icache64(addr64_t va, unsigned length, boolean_t phys)
// Dispatch wrapper: if phys (r2) is false, branch straight to
// InvalidatePoU_IcacheRegion; otherwise translate the physical address
// to the kernel virtual mapping (va - gPhysBase + gVirtBase) first,
// then fall through to the same region invalidate.
// NOTE(review): the instructions that narrow the 64-bit address for
// invalidate_icache64, and the ldr/sub and ldr/add lines that consume
// the gPhysBase/gVirtBase pointers loaded below, are elided from this
// excerpt — verify against the full file.
340 .globl EXT(invalidate_icache64)
341 .globl EXT(invalidate_icache)
342 LEXT(invalidate_icache64)
345 LEXT(invalidate_icache)
346 cmp r2, #0 // Is the address physical?
347 COND_EXTERN_BEQ(InvalidatePoU_IcacheRegion) // Virtual: invalidate region directly
348 LOAD_ADDR(r2, gPhysBase) // r2 = &gPhysBase
351 LOAD_ADDR(r2, gVirtBase) // r2 = &gVirtBase
354 b EXT(InvalidatePoU_IcacheRegion) // Tail-branch with translated va
// Emit the PC-relative address tables consumed by the LOAD_ADDR /
// LOAD_ADDR_PC macro uses above (one entry per symbol named here).
357 #include "globals_asm.h"
359 LOAD_ADDR_GEN_DEF(flush_dcache)
360 LOAD_ADDR_GEN_DEF(clean_dcache)