/*
 * NOTE(review): this file appears to be an export of a git-blame view
 * (per-line commit/line-number table residue removed). Presumably the
 * underlying file is XNU's osfmk/arm64/caches_asm.s — confirm against
 * the upstream source before committing changes.
 */
1 | /* |
2 | * Copyright (c) 2010-2013 Apple Inc. All rights reserved. | |
3 | * | |
4 | * @APPLE_OSREFERENCE_LICENSE_HEADER_START@ | |
5 | * | |
6 | * This file contains Original Code and/or Modifications of Original Code | |
7 | * as defined in and that are subject to the Apple Public Source License | |
8 | * Version 2.0 (the 'License'). You may not use this file except in | |
9 | * compliance with the License. The rights granted to you under the License | |
10 | * may not be used to create, or enable the creation or redistribution of, | |
11 | * unlawful or unlicensed copies of an Apple operating system, or to | |
12 | * circumvent, violate, or enable the circumvention or violation of, any | |
13 | * terms of an Apple operating system software license agreement. | |
14 | * | |
15 | * Please obtain a copy of the License at | |
16 | * http://www.opensource.apple.com/apsl/ and read it before using this file. | |
17 | * | |
18 | * The Original Code and all software distributed under the License are | |
19 | * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER | |
20 | * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES, | |
21 | * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY, | |
22 | * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT. | |
23 | * Please see the License for the specific language governing rights and | |
24 | * limitations under the License. | |
25 | * | |
26 | * @APPLE_OSREFERENCE_LICENSE_HEADER_END@ | |
27 | */ | |
28 | ||
29 | #include <machine/asm.h> | |
30 | #include <arm64/proc_reg.h> | |
c3c9b80d | 31 | #include <pexpert/arm64/board_config.h> |
5ba3f43e A |
32 | #include <arm/pmap.h> |
33 | #include <sys/errno.h> | |
34 | #include "assym.s" | |
35 | ||
/*
 * void InvalidatePoU_Icache(void)
 *
 * Invalidate the entire i-cache to the Point of Unification, Inner
 * Shareable domain (ic ialluis broadcasts to all cores in the IS domain).
 * invalidate_mmu_icache is an exported alias for the same entry point.
 */
	.text
	.align 2
	.globl EXT(InvalidatePoU_Icache)
	.globl EXT(invalidate_mmu_icache)
LEXT(InvalidatePoU_Icache)
LEXT(invalidate_mmu_icache)
	dsb	sy				// Ensure prior writes (e.g. newly written code) complete first
	ic	ialluis				// Invalidate icache
	dsb	sy				// Wait for the invalidation to complete...
	isb	sy				// ...and flush this core's pipeline before fetching new code
L_imi_done:					// NOTE(review): label is currently unreferenced — possibly vestigial
	ret
53 | ||
/*
 * void InvalidatePoU_IcacheRegion(vm_offset_t va, unsigned length)
 *
 * Invalidate icache region.
 *
 * First cleans the corresponding d-cache region to PoU so the icache
 * refill observes any freshly written instructions, then invalidates
 * the icache lines covering [va, va + length).
 */
	.text
	.align 2
	.globl EXT(InvalidatePoU_IcacheRegion)
LEXT(InvalidatePoU_IcacheRegion)
	ARM64_STACK_PROLOG
	PUSH_FRAME
	// NOTE(review): this relies on CleanPoU_DcacheRegion preserving
	// x0 (va) and x1 (length) — it uses only x2-x4/x9 as scratch in
	// this file. Confirm that contract holds if that routine changes.
	bl		EXT(CleanPoU_DcacheRegion)
#if __ARM_IC_NOALIAS_ICACHE__
	mov		x9, #((1<<MMU_I_CLINE)-1)	// icache line mask
	and		x2, x0, x9			// x2 = va's offset within its cache line
	bic		x0, x0, x9			// Cache-line aligned base address
	add		x1, x1, x2			// extend length to cover the leading partial line
	sub		x1, x1, #1
	lsr		x1, x1, #MMU_I_CLINE		// Set cache line counter (count - 1)
L_ipui_loop:
	ic		ivau, x0			// Invalidate icache line by VA to PoU
	add		x0, x0, #1<<MMU_I_CLINE		// Get next cache aligned addr
	subs	x1, x1, #1			// Decrement cache line counter
	b.pl	L_ipui_loop			// Loop while counter is non-negative
	dsb		sy				// Complete invalidation...
	isb		sy				// ...and flush pipeline before executing new code
L_ipui_done:
#else
	bl		EXT(InvalidatePoU_Icache)	// no non-aliasing guarantee: nuke the whole icache
#endif
	POP_FRAME
	ARM64_STACK_EPILOG
5ba3f43e | 86 | |
f427ee49 A |
/*
 * Obtains cache physical layout information required for way/set
 * data cache maintenance operations, by querying CCSIDR_EL1 for the
 * selected level. Values are shaped for direct use in the 'dc csw/cisw'
 * set/way operand (set field at bit log2(linesize), way field at the
 * top of the 32-bit operand).
 *
 * $0: Data cache level, starting from 0 (used as scratch; clobbered)
 * $1: Output register for set increment
 * $2: Output register for last valid set
 * $3: Output register for way increment
 */
.macro GET_CACHE_CONFIG
	lsl		$0, $0, #1			// CSSELR.Level is at bit 1; InD=0 selects the data/unified cache
	msr		CSSELR_EL1, $0			// Select appropriate cache
	isb							// Synchronize context so CCSIDR reflects the selection

	mrs		$0, CCSIDR_EL1
	ubfx	$1, $0, #3, #10			// extract number of ways - 1
	mov		$2, $1
	add		$1, $1, #1			// calculate number of ways

	// Compute $0 = ceil(log2(ways)): 63 - clz(ways), incremented once
	// when ways is not a power of two (detected by (ways-1) & ways != 0).
	mov		$0, #63
	and		$2, $2, $1
	cmp		$2, #0
	cinc	$0, $0, ne
	clz		$1, $1
	sub		$0, $0, $1

	// Way field sits in the top bits of the 32-bit set/way operand,
	// so the way increment is 1 << (32 - ceil(log2(ways))).
	mov		$1, #32				// calculate way increment
	sub		$3, $1, $0
	mov		$1, #1
	lsl		$3, $1, $3

	mrs		$0, CCSIDR_EL1
	ubfx	$1, $0, #0, #3			// extract log2(line size) - 4
	add		$1, $1, #4			// calculate log2(line size)
	mov		$2, #1
	lsl		$1, $2, $1			// calculate set increment (= line size in bytes)

	ubfx	$2, $0, #13, #15		// extract number of sets - 1
	add		$2, $2, #1			// calculate number of sets
	mul		$2, $1, $2			// calculate last valid set (sets * set increment)
.endmacro
128 | ||
/*
 * Returns the cache configuration for the specified level: the 3-bit
 * CLIDR_EL1.Ctype<n> field (0 = no cache, >= 2 indicates a data or
 * unified cache is present at that level).
 * $0: Output register
 * $1: Cache level register (0-based)
 * $2: Scratch register
 */
.macro CACHE_AT_LEVEL
	mrs		$0, CLIDR_EL1
	add		$2, $1, $1, lsl #1		// $2 = level * 3 (each Ctype field is 3 bits)
	lsr		$0, $0, $2
	and		$0, $0, #7			// extract cache type
.endmacro
141 | ||
/*
 * Perform set/way maintenance to the desired cache level.
 * Walks every set and way of every data/unified cache level from 0 up
 * to and including x0, stopping early at the first level with no cache.
 *
 * NOTE: this macro ends with 'ret' — it terminates the function that
 * expands it.
 *
 * $0: 'dc' set/way variant, e.g. csw or cisw
 * x0: maximum cache level, 0-based, inclusive
 * Clobbers: x1-x3, x9-x11 (via GET_CACHE_CONFIG), flags
 */
.macro DCACHE_SET_WAY
	dmb		sy				// order prior accesses before maintenance
	mov		x1, #0				// x1 = current cache level
1:
	CACHE_AT_LEVEL	x2, x1, x3
	cbz		x2, 5f				// No cache at this level, all higher levels may be skipped
	cmp		x2, #2
	b.lt	4f					// No data cache at this level, skip to next level
	mov		x2, x1
	GET_CACHE_CONFIG x2, x9, x10, x11	// x9 = set increment, x10 = last valid set, x11 = way increment
	lsl		x2, x1, #1			// level field for cisw/csw, bits 1:3
2:							// way loop
3:							// set loop
	dc		$0, x2				// clean dcache line by way/set
	add		x2, x2, x9			// increment set index
	tst		x2, x10				// look for overflow past the last valid set
	b.eq	3b
	bic		x2, x2, x10			// clear set overflow
	adds	w2, w2, w11			// increment way; carry set when ways wrap past bit 31
	b.cc	2b					// loop until all ways done
	dsb		sy				// ensure completion of prior level maintenance
4:
	add		x1, x1, #1
	cmp		x1, x0
	b.ls	1b					// next level
5:
	ret
.endmacro
5ba3f43e A |
175 | |
/*
 * void CleanPoC_Dcache(void)
 *
 * Clean all d-caches to the Point of Coherency.
 *
 * NOTE(review): clean_mmu_dcache is declared .globl here but no
 * LEXT(clean_mmu_dcache) label is visible in this chunk — confirm the
 * alias is defined elsewhere or intentionally omitted.
 */
	.text
	.align 2
	.globl EXT(CleanPoC_Dcache)
	.globl EXT(clean_mmu_dcache)
LEXT(CleanPoC_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	dsb	sy				// Apple cores: a barrier suffices; no set/way walk needed
	ret
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mrs	x0, CLIDR_EL1
	ubfx	x0, x0, #24, #3			// extract CLIDR_EL1.LoC
	DCACHE_SET_WAY	csw			// clean by set/way up to LoC; macro performs the ret
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
5ba3f43e A |
195 | |
/*
 * void CleanPoU_Dcache(void)
 *
 * Clean D-cache to Point of Unification.
 */
	.text
	.align 2
	.globl EXT(CleanPoU_Dcache)
LEXT(CleanPoU_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	dsb	sy				// Apple cores: a barrier suffices; no set/way walk needed
	ret
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mrs	x0, CLIDR_EL1
	// Fix: width immediate was written as a bare '3'; every other ubfx
	// in this file uses the '#' prefix (cf. '#24, #3' in CleanPoC_Dcache)
	// and strict assemblers reject bare immediates here.
	ubfx	x0, x0, #21, #3			// extract CLIDR_EL1.LoUIS
	DCACHE_SET_WAY	csw			// clean by set/way up to LoUIS; macro performs the ret
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
5ba3f43e A |
214 | |
/*
 * void CleanPoU_DcacheRegion(vm_offset_t va, unsigned length)
 *
 * Clean d-cache region to Point of Unification.
 *
 * NOTE(review): deliberately uses only x2-x4/x9 as scratch, leaving
 * x0 (va) and x1 (length) intact — InvalidatePoU_IcacheRegion depends
 * on that; preserve this property if modifying.
 */
	.text
	.align 2
	.globl EXT(CleanPoU_DcacheRegion)
LEXT(CleanPoU_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." — falls through to the dsb/ret below */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov	x9, #((1<<MMU_CLINE)-1)		// dcache line mask
	and	x2, x0, x9			// x2 = va's offset within its cache line
	bic	x3, x0, x9			// Cache-line aligned base address
	add	x4, x1, x2			// extend length to cover the leading partial line
	sub	x4, x4, #1
	lsr	x4, x4, #MMU_CLINE		// Set cache line counter (count - 1)
	dmb	sy				// order prior stores before the clean
L_cpudr_loop:
	dc	cvau, x3			// Clean dcache line to PoU
	add	x3, x3, #(1<<MMU_CLINE)		// Get next cache aligned addr
	subs	x4, x4, #1			// Decrement cache line counter
	b.pl	L_cpudr_loop			// Loop while counter is non-negative
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb	sy				// ensure cleans have completed
	ret
242 | ||
/*
 * void CleanPoC_DcacheRegion_internal(vm_offset_t va, size_t length)
 *
 * Clean d-cache region to Point of Coherency.
 * Internal worker (no .globl); called by the CleanPoC_DcacheRegion*
 * wrappers below.
 */
	.text
	.align 2
LEXT(CleanPoC_DcacheRegion_internal)
	mov	x10, #(MMU_CLINE)		// x10 = log2(dcache line size)

	/* Stash the line size in bytes (1 << MMU_CLINE) in x11 for easy access. */
	mov	x11, #1
	lsl	x11, x11, x10

	sub	x9, x11, #1			// x9 = cache line mask
	and	x2, x0, x9			// x2 = va's offset within its cache line
	bic	x0, x0, x9			// Cache-line aligned base address
	add	x1, x1, x2			// extend length to cover the leading partial line
	sub	x1, x1, #1
	lsr	x1, x1, x10			// Set cache line counter (count - 1)
	dsb	sy				// order prior stores before the clean
L_cpcdr_loop:
#if defined(APPLE_ARM64_ARCH_FAMILY)
	// It may be tempting to clean the cache (dc cvac),
	// but see Cyclone UM 5.3.8.3 -- it's always a NOP on Cyclone.
	//
	// Clean & Invalidate, however, will work as long as S3_0_C15_C4_0.DisDCMvaOps isn't set.
	dc	civac, x0			// Clean & Invalidate dcache line to PoC
#else
	dc	cvac, x0			// Clean dcache line to PoC
#endif
	add	x0, x0, x11			// Get next cache aligned addr
	subs	x1, x1, #1			// Decrement cache line counter
	b.pl	L_cpcdr_loop			// Loop while counter is non-negative
	dsb	sy				// ensure maintenance has completed
	ret
5ba3f43e A |
279 | |
/*
 * void CleanPoC_DcacheRegion(vm_offset_t va, size_t length)
 *
 * Clean d-cache region to Point of Coherency.
 * On Apple cores this is a barrier only ("fully coherent"); use
 * CleanPoC_DcacheRegion_Force when an actual line-by-line flush is
 * required (e.g. panic log).
 */
	.text
	.align 2
	.globl EXT(CleanPoC_DcacheRegion)
LEXT(CleanPoC_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." */
	dsb	sy
	ret
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	b	EXT(CleanPoC_DcacheRegion_internal)	// tail-call; x0/x1 pass through
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
296 | ||
/*
 * void CleanPoC_DcacheRegion_Force_nopreempt(vm_offset_t va, size_t length)
 *
 * Non-preemptible worker for CleanPoC_DcacheRegion_Force: temporarily
 * clears the HID4 DisDcMVAOps bit so that dc-by-MVA operations take
 * effect on Apple cores, performs the clean, then sets the bit again.
 * Callers are expected to have preemption disabled (see
 * CleanPoC_DcacheRegion_Force).
 */
	.text
	.align 2
	.globl EXT(CleanPoC_DcacheRegion_Force_nopreempt)
LEXT(CleanPoC_DcacheRegion_Force_nopreempt)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	ARM64_STACK_PROLOG
	PUSH_FRAME
	isb	sy
	ARM64_IS_PCORE x15			// x15 selects which per-core-type SPR variant to use below
	ARM64_READ_EP_SPR x15, x14, S3_0_C15_C4_1, S3_0_C15_C4_0	// x14 = current HID4 value
	and	x14, x14, (~ARM64_REG_HID4_DisDcMVAOps)	// allow dc-by-MVA ops
	ARM64_WRITE_EP_SPR x15, x14, S3_0_C15_C4_1, S3_0_C15_C4_0
	isb	sy				// ensure the SPR write takes effect before the clean
	bl	EXT(CleanPoC_DcacheRegion_internal)
	isb	sy
	// NOTE(review): restore uses orr, which sets DisDcMVAOps regardless
	// of its value on entry — presumably it is always set in steady
	// state; confirm.
	orr	x14, x14, ARM64_REG_HID4_DisDcMVAOps
	ARM64_WRITE_EP_SPR x15, x14, S3_0_C15_C4_1, S3_0_C15_C4_0
	isb	sy
	POP_FRAME
	ARM64_STACK_EPILOG
#else
	b	EXT(CleanPoC_DcacheRegion_internal)	// non-Apple: dc cvac works directly
#endif // APPLE_ARM64_ARCH_FAMILY
320 | ||
/*
 * void CleanPoC_DcacheRegion_Force(vm_offset_t va, size_t length)
 *
 * Clean d-cache region to Point of Coherency - when you really
 * need to flush even on coherent platforms, e.g. panic log.
 * Wraps the _nopreempt worker in a disable/enable-preemption pair so
 * the HID4 fiddling cannot be interrupted mid-flight.
 */
	.text
	.align 2
	.globl EXT(CleanPoC_DcacheRegion_Force)
LEXT(CleanPoC_DcacheRegion_Force)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	ARM64_STACK_PROLOG
	PUSH_FRAME
	stp	x0, x1, [sp, #-16]!		// _disable_preemption may clobber x0/x1; preserve args
	bl	EXT(_disable_preemption)
	ldp	x0, x1, [sp], #16
	bl	EXT(CleanPoC_DcacheRegion_Force_nopreempt)
	bl	EXT(_enable_preemption)		// args no longer needed afterwards
	POP_FRAME
	ARM64_STACK_EPILOG
#else
	b	EXT(CleanPoC_DcacheRegion_internal)	// non-Apple: no HID4 dance required
#endif // APPLE_ARM64_ARCH_FAMILY
5ba3f43e A |
344 | |
/*
 * void FlushPoC_Dcache(void)
 *
 * Clean and Invalidate dcaches to Point of Coherency.
 */
	.text
	.align 2
	.globl EXT(FlushPoC_Dcache)
LEXT(FlushPoC_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	dsb	sy				// Apple cores: a barrier suffices; no set/way walk needed
	ret
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mrs	x0, CLIDR_EL1
	ubfx	x0, x0, #24, #3			// extract CLIDR_EL1.LoC
	DCACHE_SET_WAY	cisw			// clean+invalidate by set/way up to LoC; macro performs the ret
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
c3c9b80d A |
363 | |
/*
 * void Flush_Dcache(void)
 *
 * Clean and invalidate D-cache, all levels.
 */
	.text
	.align 2
	.globl EXT(Flush_Dcache)
LEXT(Flush_Dcache)
	mov	x0, #6				// Maximum allowable caching level (0-based; CLIDR describes 7 levels)
	DCACHE_SET_WAY	cisw			// macro performs the ret
5ba3f43e A |
375 | |
/*
 * void FlushPoU_Dcache(void)
 *
 * Flush (clean and invalidate) D-cache to Point of Unification.
 */
	.text
	.align 2
	.globl EXT(FlushPoU_Dcache)
LEXT(FlushPoU_Dcache)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	dsb	sy				// Apple cores: a barrier suffices; no set/way walk needed
	ret
	/* "Fully Coherent." */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mrs	x0, CLIDR_EL1
	// Fix: width immediate was written as a bare '3'; every other ubfx
	// in this file uses the '#' prefix (cf. '#24, #3' in FlushPoC_Dcache)
	// and strict assemblers reject bare immediates here.
	ubfx	x0, x0, #21, #3			// extract CLIDR_EL1.LoUIS
	DCACHE_SET_WAY	cisw			// clean+invalidate by set/way up to LoUIS; macro performs the ret
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
5ba3f43e A |
394 | |
/*
 * void FlushPoC_DcacheRegion(vm_offset_t va, unsigned length)
 *
 * Clean and Invalidate d-cache region to Point of Coherency.
 */
	.text
	.align 2
	.globl EXT(FlushPoC_DcacheRegion)
LEXT(FlushPoC_DcacheRegion)
#if defined(APPLE_ARM64_ARCH_FAMILY)
	/* "Fully Coherent." — falls through to the dsb/ret below */
#else /* !defined(APPLE_ARM64_ARCH_FAMILY) */
	mov	x9, #((1<<MMU_CLINE)-1)		// dcache line mask
	and	x2, x0, x9			// x2 = va's offset within its cache line
	bic	x0, x0, x9			// Cache-line aligned base address
	add	x1, x1, x2			// extend length to cover the leading partial line
	sub	x1, x1, #1
	lsr	x1, x1, #MMU_CLINE		// Set cache line counter (count - 1)
	dmb	sy				// order prior stores before maintenance
L_fpcdr_loop:
	dc	civac, x0			// Clean and invalidate dcache line to PoC
	add	x0, x0, #(1<<MMU_CLINE)		// Get next cache aligned addr
	subs	x1, x1, #1			// Decrement cache line counter
	b.pl	L_fpcdr_loop			// Loop while counter is non-negative
#endif /* defined(APPLE_ARM64_ARCH_FAMILY) */
	dsb	sy				// ensure maintenance has completed
	ret
422 | ||
/*
 * void flush_dcache64(addr64_t addr, unsigned length, boolean_t phys)
 *
 * 64-bit-address alias: tail-branches to flush_dcache with x0-x2
 * passing through unchanged.
 */
	.text
	.align 2
	.globl EXT(flush_dcache64)
LEXT(flush_dcache64)
	BRANCH_EXTERN flush_dcache
431 | ||
/*
 * void clean_dcache64(addr64_t addr, unsigned length, boolean_t phys)
 *
 * 64-bit-address alias: tail-branches to clean_dcache with x0-x2
 * passing through unchanged.
 */
	.text
	.align 2
	.globl EXT(clean_dcache64)
LEXT(clean_dcache64)
	BRANCH_EXTERN clean_dcache
440 | ||
/*
 * void invalidate_icache(vm_offset_t va, unsigned length, boolean_t phys)
 * void invalidate_icache64(addr64_t va, unsigned length, boolean_t phys)
 *
 * Invalidate the icache for the given region. When 'phys' is nonzero,
 * the address is physical: it is rebased into the kernel's static
 * physmap by applying the (gVirtBase - gPhysBase) offset before the
 * region invalidate.
 */
	.text
	.align 2
	.globl EXT(invalidate_icache64)
	.globl EXT(invalidate_icache)
LEXT(invalidate_icache64)
LEXT(invalidate_icache)
	cmp	w2, #0				// Is it physical?
	b.eq	Lcall_invalidate_worker		// no: x0 is already a virtual address
	adrp	x2, _gPhysBase@page
	add	x2, x2, _gPhysBase@pageoff
	ldr	x2, [x2]
	sub	x0, x0, x2			// x0 = offset from the physical base
	adrp	x2, _gVirtBase@page
	add	x2, x2, _gVirtBase@pageoff
	ldr	x2, [x2]
	add	x0, x0, x2			// x0 = corresponding kernel virtual address
Lcall_invalidate_worker:
	b	EXT(InvalidatePoU_IcacheRegion)	// tail-call; x0/x1 pass through
463 | ||
464 | ||
465 | /* vim: set ts=4: */ |