/*
 * Copyright (c) 2004 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
#include <machine/cpu_capabilities.h>
#include <architecture/arm/asm_help.h>

/* Number of times we spin in a spinlock before going to kernel */
#define MP_SPIN_TRIES		1000
#define MP_SPIN_TRIES_WFE	10
#if defined(VARIANT_DYLD)
#if defined(_ARM_ARCH_7)
/* This makes sure we pick up MP variants for dyld on armv7 */
#define ENTRY_POINT_RESOLVER(symbol, variant) \
	ENTRY_POINT(symbol##$VARIANT$##variant) ;\
	.private_extern symbol##$VARIANT$##variant

#define ENTRY_POINT_DEFAULT(symbol, variant) \
	ENTRY_POINT(symbol##$VARIANT$##variant) ;\
	.private_extern symbol##$VARIANT$##variant

#define makeResolver_up_mp(name) \
	ENTRY_POINT(_##name) \
	ldr	ip, L##name##$commpage ; \
	ldr	ip, [ip] /* load the commpage CPU capabilities word */ ; \
	tst	ip, $(kUP) /* uniprocessor? */ ; \
	beq	_##name##$VARIANT$mp ; \
	b	_##name##$VARIANT$up ; \
L##name##$commpage: .long _COMM_PAGE_CPU_CAPABILITIES ;

#define makeResolver_up_mp_wfe(name) \
	makeResolver_up_mp(name)
#else /* dyld, !_ARM_ARCH_7 */
#define ENTRY_POINT_RESOLVER(symbol, variant) \
	ENTRY_POINT(symbol##$VARIANT$##variant) ;\
	.private_extern symbol##$VARIANT$##variant

#define ENTRY_POINT_DEFAULT(symbol, variant) ENTRY_POINT(symbol)
#endif /* _ARM_ARCH_7 */
#else /* !VARIANT_DYLD */
#if defined(_ARM_ARCH_7)
#define ENTRY_POINT_RESOLVER(symbol, variant) \
	ENTRY_POINT(symbol##$VARIANT$##variant) ;\
	.private_extern symbol##$VARIANT$##variant

#define ENTRY_POINT_DEFAULT(symbol, variant) \
	ENTRY_POINT(symbol##$VARIANT$##variant) ;\
	.private_extern symbol##$VARIANT$##variant
#else
/* _RESOLVER shouldn't be used on armv5/6, so this intentionally plants bad text. */
#define ENTRY_POINT_RESOLVER(symbol, variant) .error
#define ENTRY_POINT_DEFAULT(symbol, variant) ENTRY_POINT(symbol)
#endif /* _ARM_ARCH_7 */
#endif /* VARIANT_DYLD */
#if defined(VARIANT_DYLD) && defined(_ARM_ARCH_7)
/*
 * In dyld's build only, we include the list of resolvers needed; this
 * generates entry points for dyld which are run on every execution
 * in order to pick the correct variant.
 */
#include "OSAtomic_resolvers.h"
#endif /* VARIANT_DYLD && _ARM_ARCH_7 */
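/* For reference: each resolver above is the assembly analogue of this C
 * sketch. It tests the commpage uniprocessor bit to pick an implementation;
 * choose_variant, add32_up and add32_mp are hypothetical names, not symbols
 * defined in this file:
 *
 *	#include <machine/cpu_capabilities.h>
 *	#include <stdint.h>
 *
 *	typedef int32_t (*add32_fn)(int32_t, volatile int32_t *);
 *	extern int32_t add32_up(int32_t, volatile int32_t *);
 *	extern int32_t add32_mp(int32_t, volatile int32_t *);
 *
 *	static add32_fn choose_variant(void) {
 *		uint32_t caps = *(uint32_t *)_COMM_PAGE_CPU_CAPABILITIES;
 *		return (caps & kUP) ? add32_up : add32_mp;  // kUP set on uniprocessor
 *	}
 */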
#if defined(_ARM_ARCH_6)
/* Implement a generic atomic arithmetic operation:
 * operand is in R0, pointer is in R1. Return new
 * value in R0 (or old value in _ORIG cases).
 *
 * Return instructions are separate from the
 * _ATOMIC_ARITHMETIC macro.
 */
#define _ATOMIC_ARITHMETIC(op) \
	ldrex	r2, [r1] /* load existing value and tag memory */ ;\
	op	r3, r2, r0 /* compute new value */ ;\
	strex	ip, r3, [r1] /* store new value if memory is still tagged */ ;\
	cmp	ip, #0 /* check if the store succeeded */ ;\
	bne	1b /* if not, try again */
#if defined(_ARM_ARCH_7)
/*
 * ARMv7 barrier operations:
 *  - Full Barrier (FB): store barrier before the store exclusive, full
 *    barrier after the operation.
 */
#define ATOMIC_ARITHMETIC_FB(op) \
	dmb	ishst /* store barrier before store exclusive */ ;\
1:	_ATOMIC_ARITHMETIC(op) ;\
	dmb	ish /* issue data memory barrier */ ;\
	mov	r0, r3 /* return new value */
#define ATOMIC_ARITHMETIC_ORIG_FB(op) \
	dmb	ishst /* store barrier before store exclusive */ ;\
1:	_ATOMIC_ARITHMETIC(op) ;\
	dmb	ish /* issue data memory barrier */ ;\
	mov	r0, r2 /* return orig value */
#endif /* _ARM_ARCH_7 */

/*
 * For the non-MP ARMv7 cases, and ARMv5/6, these provide atomic arithmetic
 * without any barriers at all.
 */
#define ATOMIC_ARITHMETIC(op) \
1:	_ATOMIC_ARITHMETIC(op) ;\
	mov	r0, r3 /* return new value */

#define ATOMIC_ARITHMETIC_ORIG(op) \
1:	_ATOMIC_ARITHMETIC(op) ;\
	mov	r0, r2 /* return orig value */
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicAdd32Barrier, mp)
	ATOMIC_ARITHMETIC_FB(add)
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicAdd32Barrier, up)
ENTRY_POINT(_OSAtomicAdd32)
	ATOMIC_ARITHMETIC(add)
	bx	lr
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicOr32Barrier, mp)
	ATOMIC_ARITHMETIC_FB(orr)
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicOr32Barrier, up)
ENTRY_POINT(_OSAtomicOr32)
	ATOMIC_ARITHMETIC(orr)
	bx	lr
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicOr32OrigBarrier, mp)
	ATOMIC_ARITHMETIC_ORIG_FB(orr)
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicOr32OrigBarrier, up)
ENTRY_POINT(_OSAtomicOr32Orig)
	ATOMIC_ARITHMETIC_ORIG(orr)
	bx	lr
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicAnd32Barrier, mp)
	ATOMIC_ARITHMETIC_FB(and)
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicAnd32Barrier, up)
ENTRY_POINT(_OSAtomicAnd32)
	ATOMIC_ARITHMETIC(and)
	bx	lr
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicAnd32OrigBarrier, mp)
	ATOMIC_ARITHMETIC_ORIG_FB(and)
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicAnd32OrigBarrier, up)
ENTRY_POINT(_OSAtomicAnd32Orig)
	ATOMIC_ARITHMETIC_ORIG(and)
	bx	lr
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicXor32Barrier, mp)
	ATOMIC_ARITHMETIC_FB(eor)
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicXor32Barrier, up)
ENTRY_POINT(_OSAtomicXor32)
	ATOMIC_ARITHMETIC(eor)
	bx	lr
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicXor32OrigBarrier, mp)
	ATOMIC_ARITHMETIC_ORIG_FB(eor)
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicXor32OrigBarrier, up)
ENTRY_POINT(_OSAtomicXor32Orig)
	ATOMIC_ARITHMETIC_ORIG(eor)
	bx	lr
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicCompareAndSwap32Barrier, mp)
ENTRY_POINT_RESOLVER(_OSAtomicCompareAndSwapIntBarrier, mp)
ENTRY_POINT_RESOLVER(_OSAtomicCompareAndSwapLongBarrier, mp)
ENTRY_POINT_RESOLVER(_OSAtomicCompareAndSwapPtrBarrier, mp)
	ldrex	r3, [r2]	// load existing value and tag memory
	teq	r3, r0		// is it the same as oldValue?
	movne	r0, #0		// if not, return 0 immediately
	bxne	lr
	dmb	ishst		// store barrier before store exclusive
	strex	r3, r1, [r2]	// otherwise, try to store new value
	cmp	r3, #0		// check if the store succeeded
	bne	2f		// if not, try again
1:	dmb	ish		// memory barrier
	mov	r0, #1		// return true
	bx	lr
2:	ldrex	r3, [r2]	// load existing value and tag memory
	teq	r3, r0		// is it the same as oldValue?
	movne	r0, #0		// if not, return 0 immediately
	bxne	lr
	strex	r3, r1, [r2]	// otherwise, try to store new value
	cmp	r3, #0		// check if the store succeeded
	bne	2b		// if not, try again
	b	1b		// success: barrier, then return true
#endif
ENTRY_POINT_DEFAULT(_OSAtomicCompareAndSwap32Barrier, up)
ENTRY_POINT_DEFAULT(_OSAtomicCompareAndSwapIntBarrier, up)
ENTRY_POINT_DEFAULT(_OSAtomicCompareAndSwapLongBarrier, up)
ENTRY_POINT_DEFAULT(_OSAtomicCompareAndSwapPtrBarrier, up)
ENTRY_POINT(_OSAtomicCompareAndSwap32)
ENTRY_POINT(_OSAtomicCompareAndSwapInt)
ENTRY_POINT(_OSAtomicCompareAndSwapLong)
ENTRY_POINT(_OSAtomicCompareAndSwapPtr)
1:	ldrex	r3, [r2]	// load existing value and tag memory
	teq	r3, r0		// is it the same as oldValue?
	movne	r0, #0		// if not, return 0 immediately
	bxne	lr
	strex	r3, r1, [r2]	// otherwise, try to store new value
	cmp	r3, #0		// check if the store succeeded
	bne	1b		// if not, try again
	mov	r0, #1		// return true
	bx	lr
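/* Usage sketch (assumption: the <libkern/OSAtomic.h> prototypes, not part of
 * this file). A typical lock-free update loop built on these CAS entry
 * points retries until the swap lands:
 *
 *	#include <libkern/OSAtomic.h>
 *
 *	void increment_saturating(volatile int32_t *p, int32_t max) {
 *		int32_t cur, next;
 *		do {
 *			cur = *p;
 *			next = (cur < max) ? cur + 1 : cur;
 *		} while (!OSAtomicCompareAndSwap32(cur, next, p));
 *	}
 */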
/* Implement a generic test-and-bit-op operation:
 * bit to set is in R0, base address is in R1. Return
 * previous value (0 or 1) of the bit in R0.
 */
#define ATOMIC_BITOP_SETUP \
	/* Adjust pointer to point at the correct word ;\
	 * R1 = R1 + 4 * (R0 / 32) ;\
	 */ ;\
	mov	r3, r0, lsr #5 ;\
	add	r1, r1, r3, asl #2 ;\
	/* Generate a bit mask for the bit we want to test ;\
	 * R0 = (0x80 >> (R0 & 7)) << (R0 & ~7 & 31) ;\
	 */ ;\
	and	r2, r0, #7 /* bit index within the byte */ ;\
	mov	r3, #0x80 ;\
	mov	r3, r3, asr r2 /* r3 = 0x80 >> (R0 & 7) */ ;\
	and	r0, r0, #0x18 /* r0 = R0 & ~7 & 31 */ ;\
	mov	r0, r3, asl r0 /* position the mask within the word */
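/* Worked example of the mask computation above: for bit number 37, R1
 * advances by 4 * (37 / 32) = 4 bytes (word 1), and the mask is
 * (0x80 >> (37 & 7)) << (37 & ~7 & 31) = (0x80 >> 5) << 0 = 0x04,
 * i.e. bit 2 of byte 4 of the array on a little-endian machine -- matching
 * OSAtomicTestAndSet's documented addressing of bit (0x80 >> (n & 7)) of
 * the byte at (addr + (n >> 3)).
 */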
#define ATOMIC_BITOP(op) \
	ATOMIC_BITOP_SETUP ;\
1:	ldrex	r2, [r1] /* load existing value and tag memory */ ;\
	op	r3, r2, r0 /* compute new value */ ;\
	strex	ip, r3, [r1] /* attempt to store new value */ ;\
	cmp	ip, #0 /* check if the store succeeded */ ;\
	bne	1b /* if not, try again */ ;\
	ands	r0, r2, r0 /* mask off the bit from the old value */ ;\
	movne	r0, #1 /* if non-zero, return exactly 1 */
#if defined(_ARM_ARCH_7)
#define ATOMIC_BITOP_FB(op) \
	ATOMIC_BITOP_SETUP ;\
	dmb	ishst /* store barrier before store exclusive */ ;\
1:	ldrex	r2, [r1] /* load existing value and tag memory */ ;\
	op	r3, r2, r0 /* compute new value */ ;\
	strex	ip, r3, [r1] /* attempt to store new value */ ;\
	cmp	ip, #0 /* check if the store succeeded */ ;\
	bne	1b /* if not, try again */ ;\
	dmb	ish /* memory barrier */ ;\
	ands	r0, r2, r0 /* mask off the bit from the old value */ ;\
	movne	r0, #1 /* if non-zero, return exactly 1 */
#endif /* _ARM_ARCH_7 */
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicTestAndSetBarrier, mp)
	ATOMIC_BITOP_FB(orr)
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicTestAndSetBarrier, up)
ENTRY_POINT(_OSAtomicTestAndSet)
	ATOMIC_BITOP(orr)
	bx	lr
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicTestAndClearBarrier, mp)
	ATOMIC_BITOP_FB(bic)
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicTestAndClearBarrier, up)
ENTRY_POINT(_OSAtomicTestAndClear)
	ATOMIC_BITOP(bic)
	bx	lr
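/* Usage sketch (assumption: the <libkern/OSAtomic.h> prototypes). Note the
 * byte-oriented bit numbering implemented by the setup macro above:
 *
 *	#include <libkern/OSAtomic.h>
 *	#include <stdint.h>
 *
 *	static uint8_t bitmap[8];                        // 64 bits
 *	bool was_set = OSAtomicTestAndSet(37, bitmap);   // sets 0x04 in bitmap[4]
 *	bool was_on  = OSAtomicTestAndClear(37, bitmap); // clears it, returns true
 */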
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSMemoryBarrier, mp)
	dmb	ish
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_OSMemoryBarrier, up)
	bx	lr
/* void OSAtomicEnqueue( OSQueueHead *__list, void *__new, size_t __offset); */
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicEnqueue, mp)
	dmb	ishst		// store barrier before store exclusive
1:	ldrex	r3, [r0]	// get link to 1st on list
	str	r3, [r1, r2]	// hang list off new node
	strex	r3, r1, [r0]	// make new 1st on list
	cmp	r3, #0		// check if the store succeeded
	bne	1b		// if not, try again
	dmb	ish		// memory barrier
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicEnqueue, up)
1:	ldrex	r3, [r0]	// get link to 1st on list
	str	r3, [r1, r2]	// hang list off new node
	strex	r3, r1, [r0]	// make new 1st on list
	cmp	r3, #0		// check if the store succeeded
	bne	1b		// if not, try again
	bx	lr
/* void* OSAtomicDequeue( OSQueueHead *list, size_t offset); */
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicDequeue, mp)
	mov	r2, r0		// move list head out of the result register
	dmb	ishst		// store barrier before store exclusive
1:	ldrex	r0, [r2]	// get 1st in list
	cmp	r0, #0		// null?
	bxeq	lr		// yes, list empty
	ldr	r3, [r0, r1]	// get 2nd
	strex	ip, r3, [r2]	// make 2nd first
	cmp	ip, #0		// check if the store succeeded
	bne	1b		// if not, try again
	dmb	ish		// memory barrier
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicDequeue, up)
	mov	r2, r0		// move list head out of the result register
1:	ldrex	r0, [r2]	// get 1st in list
	cmp	r0, #0		// null?
	bxeq	lr		// yes, list empty
	ldr	r3, [r0, r1]	// get 2nd
	strex	ip, r3, [r2]	// make 2nd first
	cmp	ip, #0		// check if the store succeeded
	bne	1b		// if not, try again
	bx	lr
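/* Usage sketch (assumption: the OSQueueHead API from <libkern/OSAtomic.h>).
 * The offset argument is the byte offset of the link field within a node:
 *
 *	#include <libkern/OSAtomic.h>
 *	#include <stddef.h>
 *
 *	typedef struct elem { struct elem *link; int value; } elem_t;
 *	static OSQueueHead head = OS_ATOMIC_QUEUE_INIT;
 *
 *	elem_t e = { NULL, 42 };
 *	OSAtomicEnqueue(&head, &e, offsetof(elem_t, link));
 *	elem_t *first = OSAtomicDequeue(&head, offsetof(elem_t, link));
 */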
#if defined(_ARM_ARCH_6K)
/* If we can use LDREXD/STREXD, then we can implement 64-bit atomic operations */
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicAdd64Barrier, mp)
	// R0,R1 contain the amount to add
	// R2 contains the pointer
	stmfd	sp!, {r4, r5, r8, r9, lr}
	dmb	ishst			// store memory barrier before store exclusive
1:	ldrexd	r4, r5, [r2]		// load existing value to R4/R5 and tag memory
	adds	r8, r4, r0		// add lower half of new value into R8 and set carry bit
	adc	r9, r5, r1		// add upper half of new value into R9 with carry
	strexd	r3, r8, r9, [r2]	// store new value if memory is still tagged
	cmp	r3, #0			// check if store succeeded
	bne	1b			// if not, try again
	dmb	ish			// memory barrier
	mov	r0, r8			// return new value (low half)
	mov	r1, r9			// return new value (high half)
	ldmfd	sp!, {r4, r5, r8, r9, pc}
#endif
ENTRY_POINT_DEFAULT(_OSAtomicAdd64Barrier, up)
ENTRY_POINT(_OSAtomicAdd64)
	// R0,R1 contain the amount to add
	// R2 contains the pointer
	stmfd	sp!, {r4, r5, r8, r9, lr}
1:	ldrexd	r4, r5, [r2]		// load existing value to R4/R5 and tag memory
	adds	r8, r4, r0		// add lower half of new value into R8 and set carry bit
	adc	r9, r5, r1		// add upper half of new value into R9 with carry
	strexd	r3, r8, r9, [r2]	// store new value if memory is still tagged
	cmp	r3, #0			// check if store succeeded
	bne	1b			// if not, try again
	mov	r0, r8			// return new value (low half)
	mov	r1, r9			// return new value (high half)
	ldmfd	sp!, {r4, r5, r8, r9, pc}
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicCompareAndSwap64Barrier, mp)
	// R0,R1 contain the old value
	// R2,R3 contain the new value
	// the pointer is pushed onto the stack
	ldr	ip, [sp, #0]		// load pointer into IP
	stmfd	sp!, {r4, r5, lr}
	ldrexd	r4, r5, [ip]		// load existing value into R4/R5 and tag memory
	teq	r0, r4			// check low word
	teqeq	r1, r5			// if low words match, check high word
	movne	r0, #0			// if either match fails, return 0
	bne	2f
	dmb	ishst			// store barrier before store exclusive
	strexd	r4, r2, r3, [ip]	// otherwise, try to store new values
	cmp	r4, #0			// check if store succeeded
	bne	3f			// if not, try again
1:	dmb	ish			// memory barrier
	mov	r0, #1			// return true
2:	ldmfd	sp!, {r4, r5, pc}
3:	ldrexd	r4, r5, [ip]		// load existing value into R4/R5 and tag memory
	teq	r0, r4			// check low word
	teqeq	r1, r5			// if low words match, check high word
	movne	r0, #0			// if either match fails, return 0
	bne	2b
	strexd	r4, r2, r3, [ip]	// otherwise, try to store new values
	cmp	r4, #0			// check if store succeeded
	bne	3b			// if not, try again
	b	1b			// success: barrier, then return true
#endif
ENTRY_POINT_DEFAULT(_OSAtomicCompareAndSwap64Barrier, up)
ENTRY_POINT(_OSAtomicCompareAndSwap64)
	// R0,R1 contain the old value
	// R2,R3 contain the new value
	// the pointer is pushed onto the stack
	ldr	ip, [sp, #0]		// load pointer into IP
	stmfd	sp!, {r4, r5, lr}
1:	ldrexd	r4, r5, [ip]		// load existing value into R4/R5 and tag memory
	teq	r0, r4			// check low word
	teqeq	r1, r5			// if low words match, check high word
	movne	r0, #0			// if either match fails, return 0
	bne	2f
	strexd	r4, r2, r3, [ip]	// otherwise, try to store new values
	cmp	r4, #0			// check if store succeeded
	bne	1b			// if not, try again
	mov	r0, #1			// return true
2:	ldmfd	sp!, {r4, r5, pc}
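/* Usage sketch (assumption: the <libkern/OSAtomic.h> prototypes). The 64-bit
 * amount and result travel in R0/R1 and the CAS pointer arrives on the
 * stack, but the C-level view is simply:
 *
 *	#include <libkern/OSAtomic.h>
 *
 *	volatile int64_t big = 0;
 *	int64_t now = OSAtomicAdd64(1000000000LL, &big);   // returns new value
 *	bool ok = OSAtomicCompareAndSwap64(now, now + 1, &big);
 */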
#endif /* defined(_ARM_ARCH_6K) */

#endif /* defined(_ARM_ARCH_6) */
/*
 * Lock the lock pointed to by p. Spin (possibly forever) until the next
 * lock is available.
 */
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_spin_lock, mp)
ENTRY_POINT_RESOLVER(__spin_lock, mp)
ENTRY_POINT_RESOLVER(_OSSpinLockLock, mp)
	mov	r1, #1			// load the lock value
1:	ldrex	r2, [r0]		// load the value of [r0] into r2
	cmp	r2, #0			// compare the lock value to zero
	bne	2f			// jump to the spin if we don't own the lock
	strex	r3, r1, [r0]		// try to store the one
	cmp	r3, #0			// test to see if we stored our value
	bne	2f			// if not, jump to the spin too
	dmb	ish			// memory barrier if we acquired the lock
	bx	lr			// return to caller
2:	mov	r3, $(MP_SPIN_TRIES)	// load up r3 with spin counter
3:	ldr	r2, [r0]		// load the lock
	cmp	r2, #0			// if unlocked
	beq	1b			// then go back to the top
	subs	r3, r3, #1		// counter--
	bne	3b			// if nonzero, back to 3:

	mov	r3, r0			// r0 is clobbered by the syscall return value
	mov	r0, #0			// THREAD_NULL
					// SWITCH_OPTION_DEPRESS (r1==1 already)
	mov	r2, #1			// timeout (ms)
	mov	r12, #-61		// SYSCALL_THREAD_SWITCH
	swi	0x80			// yield to another thread
	mov	r0, r3			// restore state of r0
	b	1b			// and try the lock again
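/* For reference: the trap sequence above is the assembly form of the Mach
 * thread_switch call (a sketch; constants come from <mach/thread_switch.h>):
 *
 *	#include <mach/mach.h>
 *	#include <mach/thread_switch.h>
 *
 *	// Depress our priority for 1 ms so the lock holder can run.
 *	thread_switch(THREAD_NULL, SWITCH_OPTION_DEPRESS, 1);
 */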
#if !defined(VARIANT_DYLD)
/*
 * This sucks from a code-sharing PoV. The only difference in this version is
 * the presence of a WFE instruction in the spin loop. This is only used on
 * CPUs which get woken up regularly out of WFE waits.
 *
 * Additionally, this is completely compiled out of the dyld variant so we can
 * easily use macros to pick the normal MP version for dyld on armv7 platforms.
 */
ENTRY_POINT_RESOLVER(_spin_lock, wfe)
ENTRY_POINT_RESOLVER(__spin_lock, wfe)
ENTRY_POINT_RESOLVER(_OSSpinLockLock, wfe)
	mov	r1, #1			// load the lock value
1:	ldrex	r2, [r0]		// load the value of [r0] into r2
	cmp	r2, #0			// compare the lock value to zero
	bne	2f			// jump to the spin if we don't own the lock
	strex	r3, r1, [r0]		// try to store the one
	cmp	r3, #0			// test to see if we stored our value
	bne	2f			// if not, jump to the spin too
	dmb	ish			// memory barrier if we acquired the lock
	bx	lr			// return to caller
2:	mov	r3, $(MP_SPIN_TRIES_WFE) // load up r3 with spin counter
3:	wfe				// sleepy time
	ldr	r2, [r0]		// load the lock
	cmp	r2, #0			// if unlocked
	beq	1b			// then go back to the top
	subs	r3, r3, #1		// counter--
	bne	3b			// if nonzero, back to 3:

	mov	r3, r0			// r0 is clobbered by the syscall return value
	mov	r0, #0			// THREAD_NULL
					// SWITCH_OPTION_DEPRESS (r1==1 already)
	mov	r2, #1			// timeout (ms)
	mov	r12, #-61		// SYSCALL_THREAD_SWITCH
	swi	0x80			// yield to another thread
	mov	r0, r3			// restore state of r0
	b	1b			// and try the lock again
#endif // VARIANT_DYLD
#endif // _ARM_ARCH_7
ENTRY_POINT_DEFAULT(_spin_lock, up)
ENTRY_POINT_DEFAULT(__spin_lock, up)
ENTRY_POINT_DEFAULT(_OSSpinLockLock, up)
	mov	r1, #1			// load the lock value
1:
#if !defined(_ARM_ARCH_7)
	swp	r2, r1, [r0]		// swap in the one, old value in r2
	cmp	r2, #0			// was it unlocked?
#else
	ldrex	r2, [r0]		// load the value of [r0] into r2
	cmp	r2, #0			// compare the lock value to zero
	bne	2f			// jump to the spin if we don't own the lock
	strex	r3, r1, [r0]		// try to store the one
	cmp	r3, #0			// test to see if we stored our value
#endif // !_ARM_ARCH_7
	bxeq	lr			// if so, return
2:	mov	r3, r0			// r0 is clobbered by the syscall return value
	mov	r0, #0			// THREAD_NULL
					// SWITCH_OPTION_DEPRESS (r1==1 already)
	mov	r2, #1			// timeout (ms)
	mov	r12, #-61		// SYSCALL_THREAD_SWITCH
	swi	0x80			// yield to another thread
	mov	r0, r3			// restore state of r0
	b	1b			// and try the lock again
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_spin_lock_try, mp)
ENTRY_POINT_RESOLVER(__spin_lock_try, mp)
ENTRY_POINT_RESOLVER(_OSSpinLockTry, mp)
	mov	r1, #1			// load the lock value
	ldrex	r2, [r0]		// load the lock
	cmp	r2, #0			// is it free?
	strexeq	r3, r1, [r0]		// if so, try to store the one
	cmpeq	r3, #0			// did the store succeed?
	movne	r0, #0			// no: already held, return zero
	bxne	lr
	dmb	ish			// memory barrier after acquiring the lock
	mov	r0, #1			// lock obtained, return nonzero
	bx	lr
#endif

ENTRY_POINT_DEFAULT(_spin_lock_try, up)
ENTRY_POINT_DEFAULT(__spin_lock_try, up)
ENTRY_POINT_DEFAULT(_OSSpinLockTry, up)
	mov	r1, #1			// load the lock value
#if !defined(_ARM_ARCH_7)
	swp	r2, r1, [r0]		// swap in the one, old value in r2
	cmp	r2, #0			// was it unlocked?
#else
	ldrex	r2, [r0]		// load the lock
	cmp	r2, #0			// is it free?
	strexeq	r3, r1, [r0]		// if so, try to store the one
	cmpeq	r3, #0			// did the store succeed?
#endif // !_ARM_ARCH_7
	moveq	r0, #1			// lock obtained, return nonzero
	movne	r0, #0			// already held, return zero
	bx	lr
/*
 * Unlock the lock pointed to by p.
 */
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_spin_unlock, mp)
ENTRY_POINT_RESOLVER(__spin_unlock, mp)
ENTRY_POINT_RESOLVER(_OSSpinLockUnlock, mp)
	mov	r1, #0			// load the unlocked value
	dmb	ish			// barrier so that previous accesses are observed before unlock
1:	ldrex	r2, [r0]		// load the lock to get exclusive access
	strex	r3, r1, [r0]		// strex is instantly visible to (at least) {st,ld}rex
	cmp	r3, #0			// did the unlock succeed?
	bne	1b			// if not, try try again.
	bx	lr			// return to caller
#endif
ENTRY_POINT_DEFAULT(_spin_unlock, up)
ENTRY_POINT_DEFAULT(__spin_unlock, up)
ENTRY_POINT_DEFAULT(_OSSpinLockUnlock, up)
	mov	r1, #0			// load the unlocked value
#if !defined(_ARM_ARCH_7)
	str	r1, [r0]		// store zero to the lock
#else
1:	ldrex	r2, [r0]		// load the lock to get exclusive access
	strex	r3, r1, [r0]		// store zero to the lock
	cmp	r3, #0			// did the unlock succeed?
	bne	1b			// if not, try try again.
#endif // !_ARM_ARCH_7
	bx	lr			// return to caller
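
/* Usage sketch (assumption: the OSSpinLock API from <libkern/OSAtomic.h>):
 *
 *	#include <libkern/OSAtomic.h>
 *
 *	static OSSpinLock lock = OS_SPINLOCK_INIT;
 *
 *	OSSpinLockLock(&lock);        // spins, then yields via thread_switch
 *	// ... critical section ...
 *	OSSpinLockUnlock(&lock);      // barrier on MP, then store zero
 *
 *	if (OSSpinLockTry(&lock)) {   // non-blocking attempt, true on success
 *		OSSpinLockUnlock(&lock);
 *	}
 */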