2 * Copyright (c) 2010 Apple Inc. All rights reserved.
4 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. The rights granted to you under the License
10 * may not be used to create, or enable the creation or redistribution of,
11 * unlawful or unlicensed copies of an Apple operating system, or to
12 * circumvent, violate, or enable the circumvention or violation of, any
13 * terms of an Apple operating system software license agreement.
15 * Please obtain a copy of the License at
16 * http://www.opensource.apple.com/apsl/ and read it before using this file.
18 * The Original Code and all software distributed under the License are
19 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
20 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
21 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
22 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
23 * Please see the License for the specific language governing rights and
24 * limitations under the License.
26 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
29 #include <machine/asm.h>
30 #include <arm/proc_reg.h>
32 #include <sys/errno.h>
// ---------------------------------------------------------------------------
// void invalidate_mmu_cache(void)
//
// Invalidate the entire D-cache and I-cache in a single CP15 operation
// (c7, c7, 0 — "invalidate both caches").
// NOTE(review): this chunk is elided — the setup of r0 (presumably
// mov r0, #0, since the all-invalidate op ignores the register value),
// any barriers, and the return instruction are not visible here; confirm
// against the full source file.
// ---------------------------------------------------------------------------
37 * void invalidate_mmu_cache(void)
39 * Invalidate d-cache and i-cache
43 .globl EXT(invalidate_mmu_cache)
44 LEXT(invalidate_mmu_cache)
47 mcr p15, 0, r0, c7, c7, 0 // Invalidate caches
// ---------------------------------------------------------------------------
// void invalidate_mmu_dcache(void)
//
// Invalidate the entire D-cache without cleaning it first
// (c7, c6, 0 — "invalidate entire data cache"). Dirty lines are discarded,
// so callers must know the cache contents are disposable.
// NOTE(review): r0 setup, barriers, and the return are not visible in this
// chunk — confirm against the full source file.
// ---------------------------------------------------------------------------
53 * void invalidate_mmu_dcache(void)
59 .globl EXT(invalidate_mmu_dcache)
60 LEXT(invalidate_mmu_dcache)
63 mcr p15, 0, r0, c7, c6, 0 // Invalidate dcache
// ---------------------------------------------------------------------------
// void invalidate_mmu_dcache_region(vm_offset_t va, unsigned length)
//
// Line-by-line D-cache maintenance over [va, va+length).
//   r0 = va (rounded down to a cache-line boundary below)
//   r1 = length (converted to a cache-line count below)
//   r2 = va's offset within its cache line
// NOTE(review): the instruction that folds r2 back into the length
// (presumably add r1, r1, r2 so a partially-covered first line is counted),
// the fmdr_loop: label targeted by the bpl below, and the function epilogue
// are all missing from this chunk — confirm against the full source file.
// NOTE(review): the c7, c14, 1 op used here is commented "Clean & invalidate
// dcache line" elsewhere in this file (see FlushPoC_DcacheRegion); the
// "Invalidate" wording below may understate what the hardware does.
// ---------------------------------------------------------------------------
68 * void invalidate_mmu_dcache_region(vm_offset_t va, unsigned length)
70 * Invalidate d-cache region
74 .globl EXT(invalidate_mmu_dcache_region)
75 LEXT(invalidate_mmu_dcache_region)
76 and r2, r0, #((1<<MMU_CLINE)-1)
77 bic r0, r0, #((1<<MMU_CLINE)-1) // Cached aligned
80 mov r1, r1, LSR #MMU_CLINE // Set cache line counter
83 mcr p15, 0, r0, c7, c14, 1 // Invalidate dcache line
84 add r0, r0, #1<<MMU_CLINE // Get next cache aligned addr
85 subs r1, r1, #1 // Decrementer cache line counter
86 bpl fmdr_loop // Loop in counter not null
// ---------------------------------------------------------------------------
// void InvalidatePoU_Icache(void)
// void invalidate_mmu_icache(void)   (alias — both labels mark this entry)
//
// Invalidate the entire I-cache (c7, c5, 0 — "invalidate entire
// instruction cache").
// NOTE(review): r0 setup, any ISB/branch-predictor maintenance, and the
// return instruction are not visible in this chunk — confirm against the
// full source file.
// ---------------------------------------------------------------------------
91 * void InvalidatePoU_Icache(void)
97 .globl EXT(InvalidatePoU_Icache)
98 .globl EXT(invalidate_mmu_icache)
99 LEXT(InvalidatePoU_Icache)
100 LEXT(invalidate_mmu_icache)
103 mcr p15, 0, r0, c7, c5, 0 // Invalidate icache
// ---------------------------------------------------------------------------
// void InvalidatePoU_IcacheRegion(vm_offset_t va, unsigned length)
//
// First cleans the corresponding D-cache region to the Point of Unification
// (so newly written instructions are visible to instruction fetch), then
// invalidates the I-cache line by line (c7, c5, 1 — "invalidate instruction
// cache line by MVA").
//   r0 = va, r1 = length, r2 = offset within first line
// The bl below relies on CleanPoU_DcacheRegion using only r3/r12 as scratch
// so that r0/r1 survive the call (see that routine in this file).
// NOTE(review): no save of lr is visible before the bl, the length
// adjustment using r2, the fmir_loop: label targeted by bpl, and the
// epilogue are all missing from this chunk — confirm against the full file.
// ---------------------------------------------------------------------------
109 * void InvalidatePoU_IcacheRegion(vm_offset_t va, unsigned length)
111 * Invalidate icache region
115 .globl EXT(InvalidatePoU_IcacheRegion)
116 LEXT(InvalidatePoU_IcacheRegion)
119 bl EXT(CleanPoU_DcacheRegion)
120 and r2, r0, #((1<<MMU_I_CLINE)-1)
121 bic r0, r0, #((1<<MMU_I_CLINE)-1) // Cached aligned
124 mov r1, r1, LSR #MMU_I_CLINE // Set cache line counter
126 mcr p15, 0, r0, c7, c5, 1 // Invalidate icache line
127 add r0, r0, #1<<MMU_I_CLINE // Get next cache aligned addr
128 subs r1, r1, #1 // Decrementer cache line counter
129 bpl fmir_loop // Loop in counter not null
// ---------------------------------------------------------------------------
// void CleanPoC_Dcache(void)
// void clean_mmu_dcache(void)   (alias — both labels mark this entry)
//
// Clean (write back, without invalidating) the whole D-cache to the Point
// of Coherency by iterating over every set/way (c7, c10, 2 — "clean data
// cache line by set/way"). The L1 pass is compiled out on write-through L1
// caches (__ARM_L1_WT_CACHE__); a second pass walks the L2 using the
// L2_* set/way geometry.
// Set/way iteration pattern (both passes):
//   r0 encodes (way | set); bump the set field, detect set overflow with
//   tst, clear it, bump the way field, and loop until the way field carries
//   out (bcc falls through when C is set by the adds).
// NOTE(review): the labels clean_dcacheway:/clean_dcacheline: and
// clean_l2dcacheway:/clean_l2dcacheline: targeted by the branches, the
// initial r0 = 0 setup, the L1 loop's beq, the #else/#endif closers, and
// the epilogue are not visible in this chunk — confirm against the full
// source file.
// ---------------------------------------------------------------------------
135 * void CleanPoC_Dcache(void)
141 .globl EXT(CleanPoC_Dcache)
142 .globl EXT(clean_mmu_dcache)
143 LEXT(CleanPoC_Dcache)
144 LEXT(clean_mmu_dcache)
145 #if !defined(__ARM_L1_WT_CACHE__)
150 mcr p15, 0, r0, c7, c10, 2 // clean dcache line by way/set
151 add r0, r0, #1 << MMU_I7SET // increment set index
152 tst r0, #1 << (MMU_NSET + MMU_I7SET) // look for overflow
154 bic r0, r0, #1 << (MMU_NSET + MMU_I7SET) // clear set overflow
155 adds r0, r0, #1 << MMU_I7WAY // increment way
156 bcc clean_dcacheway // loop
163 mcr p15, 0, r0, c7, c10, 2 // clean dcache line by way/set
164 add r0, r0, #1 << L2_I7SET // increment set index
165 tst r0, #1 << (L2_NSET + L2_I7SET) // look for overflow
166 beq clean_l2dcacheline
167 bic r0, r0, #1 << (L2_NSET + L2_I7SET) // clear set overflow
168 adds r0, r0, #1 << L2_I7WAY // increment way
169 bcc clean_l2dcacheway // loop
// ---------------------------------------------------------------------------
// void CleanPoU_Dcache(void)
//
// Clean (write back) the D-cache to the Point of Unification by set/way
// (c7, c10, 2). Compiled out entirely when the cache is write-through to
// the PoU (__ARM_PoU_WT_CACHE__) — nothing to clean in that case.
// Loop: r0 encodes (way | set); inner beq loops over sets until the set
// field overflows, then the way field is advanced; bcc exits once the way
// increment carries out.
// NOTE(review): the initial r0 setup (presumably r0 = 0), the matching
// #else/#endif, and the epilogue are not visible in this chunk — confirm
// against the full source file.
// ---------------------------------------------------------------------------
175 * void CleanPoU_Dcache(void)
177 * Clean D-cache to Point of Unification
181 .globl EXT(CleanPoU_Dcache)
182 LEXT(CleanPoU_Dcache)
183 #if !defined(__ARM_PoU_WT_CACHE__)
186 clean_dcacheway_idle:
187 clean_dcacheline_idle:
188 mcr p15, 0, r0, c7, c10, 2 // clean dcache line by way/set
189 add r0, r0, #1 << MMU_I7SET // increment set index
190 tst r0, #1 << (MMU_NSET + MMU_I7SET) // look for overflow
191 beq clean_dcacheline_idle
192 bic r0, r0, #1 << (MMU_NSET + MMU_I7SET) // clear set overflow
193 adds r0, r0, #1 << MMU_I7WAY // increment way
194 bcc clean_dcacheway_idle // loop
// ---------------------------------------------------------------------------
// void CleanPoU_DcacheRegion(vm_offset_t va, unsigned length)
//
// Clean the D-cache lines covering [va, va+length) to the Point of
// Unification by MVA (c7, c11, 1). Deliberately uses only r3 and r12 as
// working registers so that r0 (va) and r1 (length) are preserved for the
// caller — InvalidatePoU_IcacheRegion in this file bl's here and then
// reuses r0/r1 for its own loop.
// No-op when the cache is write-through to the PoU (__ARM_PoU_WT_CACHE__).
// NOTE(review): the line that derives the counter in r12 from r1/r2, the
// cudr_loop: label targeted by bpl, the #else/#endif, and the return are
// not visible in this chunk — confirm against the full source file.
// ---------------------------------------------------------------------------
200 * void CleanPoU_DcacheRegion(vm_offset_t va, unsigned length)
202 * Clean d-cache region to Point of Unification
206 .globl EXT(CleanPoU_DcacheRegion)
207 LEXT(CleanPoU_DcacheRegion)
208 #if !defined(__ARM_PoU_WT_CACHE__)
210 and r2, r0, #((1<<MMU_CLINE)-1)
211 bic r3, r0, #((1<<MMU_CLINE)-1) // Cached aligned
214 mov r12, r12, LSR #MMU_CLINE // Set cache line counter
217 mcr p15, 0, r3, c7, c11, 1 // Clean dcache line to PoU
218 add r3, r3, #1<<MMU_CLINE // Get next cache aligned addr
219 subs r12, r12, #1 // Decrementer cache line counter
220 bpl cudr_loop // Loop in counter not null
// ---------------------------------------------------------------------------
// void CleanPoC_DcacheRegion(vm_offset_t va, unsigned length)
// void CleanPoC_DcacheRegion_Force(vm_offset_t va, unsigned length)
//   (alias — on this configuration both names share one implementation)
//
// Clean the D-cache lines covering [va, va+length) to the Point of
// Coherency by MVA (c7, c10, 1 — write back to memory without
// invalidating).
//   r0 = running line-aligned address, r1 = remaining line count,
//   r2 = va's offset within its first line
// NOTE(review): the instruction folding r2 into the length, the
// ccdr_loop: label targeted by bpl, and the epilogue are not visible in
// this chunk — confirm against the full source file.
// ---------------------------------------------------------------------------
227 * void CleanPoC_DcacheRegion(vm_offset_t va, unsigned length)
229 * Clean d-cache region to Point of Coherency
233 .globl EXT(CleanPoC_DcacheRegion)
234 .globl EXT(CleanPoC_DcacheRegion_Force)
235 LEXT(CleanPoC_DcacheRegion)
236 LEXT(CleanPoC_DcacheRegion_Force)
237 and r2, r0, #((1<<MMU_CLINE)-1)
238 bic r0, r0, #((1<<MMU_CLINE)-1) // Cached aligned
241 mov r1, r1, LSR #MMU_CLINE // Set cache line counter
243 mcr p15, 0, r0, c7, c10, 1 // Clean dcache line to PoC
244 add r0, r0, #1<<MMU_CLINE // Get next cache aligned addr
245 subs r1, r1, #1 // Decrementer cache line counter
246 bpl ccdr_loop // Loop in counter not null
// ---------------------------------------------------------------------------
// void FlushPoC_Dcache(void)
//
// Clean AND invalidate the whole D-cache to the Point of Coherency by
// set/way (c7, c14, 2 — "clean and invalidate data cache line by
// set/way"): dirty lines are written back, then all lines are discarded.
// Two passes: L1 (MMU_* set/way geometry) followed by L2 (L2_* geometry).
// Loop: r0 encodes (way | set); beq loops over sets until the set field
// overflows, then the way field is advanced; bcc exits when the way
// increment carries out.
// NOTE(review): the initial r0 = 0 setup before each pass, the surrounding
// L2 conditional compilation (if any), and the epilogue are not visible in
// this chunk — confirm against the full source file.
// ---------------------------------------------------------------------------
251 * void FlushPoC_Dcache(void)
253 * Clean and Invalidate dcaches to Point of Coherency
257 .globl EXT(FlushPoC_Dcache)
258 LEXT(FlushPoC_Dcache)
261 cleanflush_dcacheway:
262 cleanflush_dcacheline:
263 mcr p15, 0, r0, c7, c14, 2 // cleanflush dcache line by way/set
264 add r0, r0, #1 << MMU_I7SET // increment set index
265 tst r0, #1 << (MMU_NSET + MMU_I7SET) // look for overflow
266 beq cleanflush_dcacheline
267 bic r0, r0, #1 << (MMU_NSET + MMU_I7SET) // clear set overflow
268 adds r0, r0, #1 << MMU_I7WAY // increment way
269 bcc cleanflush_dcacheway // loop
273 cleanflush_l2dcacheway:
274 cleanflush_l2dcacheline:
275 mcr p15, 0, r0, c7, c14, 2 // cleanflush dcache line by way/set
276 add r0, r0, #1 << L2_I7SET // increment set index
277 tst r0, #1 << (L2_NSET + L2_I7SET) // look for overflow
278 beq cleanflush_l2dcacheline
279 bic r0, r0, #1 << (L2_NSET + L2_I7SET) // clear set overflow
280 adds r0, r0, #1 << L2_I7WAY // increment way
281 bcc cleanflush_l2dcacheway // loop
// ---------------------------------------------------------------------------
// void FlushPoU_Dcache(void)
//
// Clean and invalidate the D-cache to the Point of Unification by set/way
// (c7, c14, 2), same iteration scheme as the other set/way loops in this
// file: bump set, detect/clear set overflow, bump way.
// NOTE(review): this chunk is missing the loop-entry labels, the beq that
// continues the set loop, the bcc that continues the way loop after the
// adds below, the initial r0 setup, and the epilogue — the visible lines
// are only the loop body. Confirm against the full source file before
// relying on this listing.
// ---------------------------------------------------------------------------
287 * void FlushPoU_Dcache(void)
289 * Flush D-cache to Point of Unification
293 .globl EXT(FlushPoU_Dcache)
294 LEXT(FlushPoU_Dcache)
299 mcr p15, 0, r0, c7, c14, 2 // cleanflush dcache line by way/set
300 add r0, r0, #1 << MMU_I7SET // increment set index
301 tst r0, #1 << (MMU_NSET + MMU_I7SET) // look for overflow
303 bic r0, r0, #1 << (MMU_NSET + MMU_I7SET) // clear set overflow
304 adds r0, r0, #1 << MMU_I7WAY // increment way
// ---------------------------------------------------------------------------
// void FlushPoC_DcacheRegion(vm_offset_t va, unsigned length)
//
// Clean and invalidate the D-cache lines covering [va, va+length) to the
// Point of Coherency by MVA (c7, c14, 1): write back dirty data, then
// discard the lines.
//   r0 = running line-aligned address, r1 = remaining line count,
//   r2 = va's offset within its first line
// NOTE(review): the instruction folding r2 into the length, the
// cfmdr_loop: label targeted by bpl, and the epilogue are not visible in
// this chunk — confirm against the full source file.
// ---------------------------------------------------------------------------
310 * void FlushPoC_DcacheRegion(vm_offset_t va, unsigned length)
312 * Clean and Invalidate d-cache region to Point of Coherency
316 .globl EXT(FlushPoC_DcacheRegion)
317 LEXT(FlushPoC_DcacheRegion)
318 and r2, r0, #((1<<MMU_CLINE)-1)
319 bic r0, r0, #((1<<MMU_CLINE)-1) // Cached aligned
322 mov r1, r1, LSR #MMU_CLINE // Set cache line counter
325 mcr p15, 0, r0, c7, c14, 1 // Clean & invalidate dcache line
326 add r0, r0, #1<<MMU_CLINE // Get next cache aligned addr
327 subs r1, r1, #1 // Decrementer cache line counter
328 bpl cfmdr_loop // Loop in counter not null
// ---------------------------------------------------------------------------
// void flush_dcache64(addr64_t addr, unsigned length, boolean_t phys)
//
// 64-bit-address wrapper: tail-jumps to flush_dcache via the LOAD_ADDR_PC
// macro (address literal emitted by LOAD_ADDR_GEN_DEF(flush_dcache) at the
// end of this file).
// NOTE(review): the label line for this entry point and any argument
// narrowing (64-bit addr -> 32-bit va) are not visible in this chunk —
// confirm against the full source file.
// ---------------------------------------------------------------------------
333 * void flush_dcache64(addr64_t addr, unsigned length, boolean_t phys)
337 .globl EXT(flush_dcache64)
341 LOAD_ADDR_PC(flush_dcache)
// ---------------------------------------------------------------------------
// void clean_dcache64(addr64_t addr, unsigned length, boolean_t phys)
//
// 64-bit-address wrapper: tail-jumps to clean_dcache via LOAD_ADDR_PC
// (address literal emitted by LOAD_ADDR_GEN_DEF(clean_dcache) at the end
// of this file).
// NOTE(review): the label line for this entry point and any argument
// narrowing are not visible in this chunk — confirm against the full file.
// ---------------------------------------------------------------------------
344 * void clean_dcache64(addr64_t addr, unsigned length, boolean_t phys)
348 .globl EXT(clean_dcache64)
352 LOAD_ADDR_PC(clean_dcache)
// ---------------------------------------------------------------------------
// void invalidate_icache(vm_offset_t va, unsigned length, boolean_t phys)
// void invalidate_icache64(addr64_t va, unsigned length, boolean_t phys)
//
// I-cache region invalidation with optional physical-address translation:
//   r0 = va, r1 = length, r2 = phys flag.
// If r2 == 0 (virtual address) the cmp/COND_EXTERN_BEQ pair tail-branches
// straight to InvalidatePoU_IcacheRegion. Otherwise the physical address
// is converted to the kernel virtual alias before the final branch —
// presumably r0 = r0 - gPhysBase + gVirtBase, but the subtract/add between
// the two LOAD_ADDRs is not visible in this chunk (confirm against the
// full source file). The LOAD_ADDR(r2, ...) loads clobber the phys flag,
// which is dead by then.
// NOTE(review): the invalidate_icache64 entry presumably narrows the
// 64-bit address into r0 before falling into invalidate_icache; the lines
// between the two LEXT labels are not visible here.
// ---------------------------------------------------------------------------
355 * void invalidate_icache(vm_offset_t va, unsigned length, boolean_t phys)
356 * void invalidate_icache64(addr64_t va, unsigned length, boolean_t phys)
360 .globl EXT(invalidate_icache64)
361 .globl EXT(invalidate_icache)
362 LEXT(invalidate_icache64)
365 LEXT(invalidate_icache)
366 cmp r2, #0 // Is it physical?
367 COND_EXTERN_BEQ(InvalidatePoU_IcacheRegion)
368 LOAD_ADDR(r2, gPhysBase)
371 LOAD_ADDR(r2, gVirtBase)
374 b EXT(InvalidatePoU_IcacheRegion)
// ---------------------------------------------------------------------------
// Address-literal definitions consumed by the LOAD_ADDR / LOAD_ADDR_PC
// macro uses above (flush_dcache64 / clean_dcache64). globals_asm.h
// provides the generated definitions for kernel globals such as
// gPhysBase / gVirtBase referenced by invalidate_icache.
// ---------------------------------------------------------------------------
377 #include "globals_asm.h"
379 LOAD_ADDR_GEN_DEF(flush_dcache)
380 LOAD_ADDR_GEN_DEF(clean_dcache)