/*
 * Copyright (c) 2004 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

#include <machine/cpu_capabilities.h>
#include <architecture/arm/asm_help.h>
#include <arm/arch.h>

.text

/* Number of times we spin in a spinlock before going to kernel */
#define MP_SPIN_TRIES      1000
#define MP_SPIN_TRIES_WFE  10

#if defined(VARIANT_DYLD)
    #if defined(_ARM_ARCH_7)
        /* This makes sure we pick up MP variants for dyld on armv7 */
        #define ENTRY_POINT_RESOLVER(symbol, variant) \
            ENTRY_POINT(symbol##$VARIANT$##variant) ;\
            .private_extern symbol##$VARIANT$##variant

        #define ENTRY_POINT_DEFAULT(symbol, variant) \
            ENTRY_POINT(symbol##$VARIANT$##variant) ;\
            .private_extern symbol##$VARIANT$##variant

        #define makeResolver_up_mp(name) \
            ENTRY_POINT(_##name) \
            ldr ip, L##name##$commpage ; \
            ldr ip, [ip] ; \
            tst ip, $(kUP) ; \
            beq _##name##$VARIANT$mp ; \
            b _##name##$VARIANT$up ; \
            L##name##$commpage: .long _COMM_PAGE_CPU_CAPABILITIES ;

        #define makeResolver_up_mp_wfe(name) \
            makeResolver_up_mp(name)
    #else
        #define ENTRY_POINT_RESOLVER(symbol, variant) \
            ENTRY_POINT(symbol##$VARIANT$##variant) ;\
            .private_extern symbol##$VARIANT$##variant

        #define ENTRY_POINT_DEFAULT(symbol, variant) ENTRY_POINT(symbol)
    #endif
#else
    #if defined(_ARM_ARCH_7)
        #define ENTRY_POINT_RESOLVER(symbol, variant) \
            ENTRY_POINT(symbol##$VARIANT$##variant) ;\
            .private_extern symbol##$VARIANT$##variant
        #define ENTRY_POINT_DEFAULT(symbol, variant) \
            ENTRY_POINT(symbol##$VARIANT$##variant) ;\
            .private_extern symbol##$VARIANT$##variant
    #else // !_ARM_ARCH_7
        /* ENTRY_POINT_RESOLVER should not be used on armv5/6, so this
         * intentionally plants text that will not assemble. */
        #define ENTRY_POINT_RESOLVER(symbol, variant) .error
        #define ENTRY_POINT_DEFAULT(symbol, variant) ENTRY_POINT(symbol)
    #endif
#endif // VARIANT_DYLD

#if defined(VARIANT_DYLD) && defined(_ARM_ARCH_7)
/*
 * In dyld's build only, we include the list of resolvers needed; this
 * generates entry points for dyld which are run on every execution in
 * order to pick the correct variant.
 */
#include "OSAtomic_resolvers.h"
#endif
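
/*
 * Illustrative sketch (not part of the build): the makeResolver_up_mp glue
 * above behaves roughly like the following C, choosing the UP or MP variant
 * by testing the kUP bit in the commpage capability word.  The C function
 * names below are hypothetical stand-ins for the $VARIANT$ symbols.
 *
 *   #include <stdint.h>
 *   #include <machine/cpu_capabilities.h>
 *
 *   extern int32_t OSAtomicAdd32_up(int32_t amt, volatile int32_t *ptr);
 *   extern int32_t OSAtomicAdd32_mp(int32_t amt, volatile int32_t *ptr);
 *
 *   int32_t OSAtomicAdd32(int32_t amt, volatile int32_t *ptr)
 *   {
 *       uint32_t caps = *(volatile uint32_t *)_COMM_PAGE_CPU_CAPABILITIES;
 *       return (caps & kUP) ? OSAtomicAdd32_up(amt, ptr)
 *                           : OSAtomicAdd32_mp(amt, ptr);
 *   }
 */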

#if defined(_ARM_ARCH_6)

/* Implement a generic atomic arithmetic operation:
 * operand is in R0, pointer is in R1. Return the new
 * value in R0 (or the old value in the _ORIG cases).
 *
 * The return instructions are separate from the
 * _ATOMIC_ARITHMETIC macro.
 */
#define _ATOMIC_ARITHMETIC(op) \
    ldrex   r2, [r1]        /* load existing value and tag memory */ ;\
    op      r3, r2, r0      /* compute new value */ ;\
    strex   ip, r3, [r1]    /* store new value if memory is still tagged */ ;\
    cmp     ip, #0          /* check if the store succeeded */ ;\
    bne     1b              /* if not, try again */

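/*
 * For reference, the LDREX/STREX loop that _ATOMIC_ARITHMETIC(add) expands
 * to is equivalent in effect to this C sketch (illustrative only;
 * try_store_exclusive() is a hypothetical helper standing in for the strex
 * conditional store, which fails if the exclusive tag has been lost):
 *
 *   int32_t atomic_add32(int32_t amount, volatile int32_t *ptr)
 *   {
 *       int32_t oldval, newval;
 *       do {
 *           oldval = *ptr;                 // ldrex: load and tag the location
 *           newval = oldval + amount;      // compute the new value
 *       } while (!try_store_exclusive(ptr, newval));   // strex
 *       return newval;                     // _ORIG variants return oldval instead
 *   }
 */
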
#if defined(_ARM_ARCH_7)
/*
 * ARMv7 barrier operations:
 * - Full Barrier (FB); store barrier before store exclusive, full barrier after op.
 */

#define ATOMIC_ARITHMETIC_FB(op) \
    dmb     ishst           /* store barrier before store exclusive */ ;\
1:  _ATOMIC_ARITHMETIC(op) ;\
    dmb     ish             /* issue data memory barrier */ ;\
    mov     r0, r3          /* return new value */

#define ATOMIC_ARITHMETIC_ORIG_FB(op) \
    dmb     ishst           /* store barrier before store exclusive */ ;\
1:  _ATOMIC_ARITHMETIC(op) ;\
    dmb     ish             /* issue data memory barrier */ ;\
    mov     r0, r2          /* return orig value */

#endif

/*
 * For the non-MP ARMv7 cases, and ARMv5/6, these provide atomic arithmetic
 * without any barriers at all.
 */
#define ATOMIC_ARITHMETIC(op) \
1:  _ATOMIC_ARITHMETIC(op) ;\
    mov     r0, r3          /* return new value */

#define ATOMIC_ARITHMETIC_ORIG(op) \
1:  _ATOMIC_ARITHMETIC(op) ;\
    mov     r0, r2          /* return orig value */

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicAdd32Barrier, mp)
    ATOMIC_ARITHMETIC_FB(add)
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicAdd32Barrier, up)
ENTRY_POINT(_OSAtomicAdd32)
    ATOMIC_ARITHMETIC(add)
    bx      lr

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicOr32Barrier, mp)
    ATOMIC_ARITHMETIC_FB(orr)
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicOr32Barrier, up)
ENTRY_POINT(_OSAtomicOr32)
    ATOMIC_ARITHMETIC(orr)
    bx      lr

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicOr32OrigBarrier, mp)
    ATOMIC_ARITHMETIC_ORIG_FB(orr)
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicOr32OrigBarrier, up)
ENTRY_POINT(_OSAtomicOr32Orig)
    ATOMIC_ARITHMETIC_ORIG(orr)
    bx      lr

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicAnd32Barrier, mp)
    ATOMIC_ARITHMETIC_FB(and)
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicAnd32Barrier, up)
ENTRY_POINT(_OSAtomicAnd32)
    ATOMIC_ARITHMETIC(and)
    bx      lr

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicAnd32OrigBarrier, mp)
    ATOMIC_ARITHMETIC_ORIG_FB(and)
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicAnd32OrigBarrier, up)
ENTRY_POINT(_OSAtomicAnd32Orig)
    ATOMIC_ARITHMETIC_ORIG(and)
    bx      lr

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicXor32Barrier, mp)
    ATOMIC_ARITHMETIC_FB(eor)
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicXor32Barrier, up)
ENTRY_POINT(_OSAtomicXor32)
    ATOMIC_ARITHMETIC(eor)
    bx      lr

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicXor32OrigBarrier, mp)
    ATOMIC_ARITHMETIC_ORIG_FB(eor)
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicXor32OrigBarrier, up)
ENTRY_POINT(_OSAtomicXor32Orig)
    ATOMIC_ARITHMETIC_ORIG(eor)
    bx      lr

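/*
 * Caller-side sketch (illustrative C only): the Orig entry points above
 * return the value the word held before the operation, e.g. to detect the
 * first thread to set a flag bit.  Names other than the OSAtomic call are
 * hypothetical.
 *
 *   #include <stdbool.h>
 *   #include <libkern/OSAtomic.h>
 *
 *   static volatile uint32_t state;
 *
 *   bool set_flag_first(uint32_t bit)
 *   {
 *       uint32_t old = OSAtomicOr32OrigBarrier(bit, &state);
 *       return (old & bit) == 0;    // true iff the bit was previously clear
 *   }
 */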

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicCompareAndSwap32Barrier, mp)
ENTRY_POINT_RESOLVER(_OSAtomicCompareAndSwapIntBarrier, mp)
ENTRY_POINT_RESOLVER(_OSAtomicCompareAndSwapLongBarrier, mp)
ENTRY_POINT_RESOLVER(_OSAtomicCompareAndSwapPtrBarrier, mp)
    ldrex   r3, [r2]        // load existing value and tag memory
    teq     r3, r0          // is it the same as oldValue?
    movne   r0, #0          // if not, return 0 immediately
    bxne    lr
    dmb     ishst           // store barrier before store exclusive
    strex   r3, r1, [r2]    // otherwise, try to store new value
    cmp     r3, #0          // check if the store succeeded
    bne     2f              // if not, try again
1:  dmb     ish             // memory barrier
    mov     r0, #1          // return true
    bx      lr
2:  ldrex   r3, [r2]        // load existing value and tag memory
    teq     r3, r0          // is it the same as oldValue?
    movne   r0, #0          // if not, return 0 immediately
    bxne    lr
    strex   r3, r1, [r2]    // otherwise, try to store new value
    cmp     r3, #0          // check if the store succeeded
    bne     2b              // if not, try again
    b       1b              // return
#endif

ENTRY_POINT_DEFAULT(_OSAtomicCompareAndSwap32Barrier, up)
ENTRY_POINT_DEFAULT(_OSAtomicCompareAndSwapIntBarrier, up)
ENTRY_POINT_DEFAULT(_OSAtomicCompareAndSwapLongBarrier, up)
ENTRY_POINT_DEFAULT(_OSAtomicCompareAndSwapPtrBarrier, up)
ENTRY_POINT(_OSAtomicCompareAndSwap32)
ENTRY_POINT(_OSAtomicCompareAndSwapInt)
ENTRY_POINT(_OSAtomicCompareAndSwapLong)
ENTRY_POINT(_OSAtomicCompareAndSwapPtr)
1:  ldrex   r3, [r2]        // load existing value and tag memory
    teq     r3, r0          // is it the same as oldValue?
    movne   r0, #0          // if not, return 0 immediately
    bxne    lr
    strex   r3, r1, [r2]    // otherwise, try to store new value
    cmp     r3, #0          // check if the store succeeded
    bne     1b              // if not, try again
    mov     r0, #1          // return true
    bx      lr

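/*
 * Caller-side sketch (illustrative C only): the compare-and-swap entry
 * points above are typically used in a retry loop to apply an arbitrary
 * update to a shared 32-bit value.  add_clamped() is a hypothetical example.
 *
 *   #include <stdint.h>
 *   #include <libkern/OSAtomic.h>
 *
 *   void add_clamped(volatile int32_t *p, int32_t amt, int32_t max)
 *   {
 *       int32_t oldval, newval;
 *       do {
 *           oldval = *p;
 *           newval = oldval + amt;
 *           if (newval > max)
 *               newval = max;
 *       } while (!OSAtomicCompareAndSwap32Barrier(oldval, newval, p));
 *   }
 */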

/* Implement a generic test-and-bit-op operation:
 * the bit number is in R0, the base address is in R1. Return the
 * previous value (0 or 1) of the bit in R0.
 */
#define _BITOP(op) \
    /* Adjust pointer to point at the correct word ;\
     * R1 = R1 + 4 * (R0 / 32) ;\
     */ ;\
    mov     r3, r0, lsr #5 ;\
    add     r1, r1, r3, asl #2 ;\
    /* Generate a bit mask for the bit we want to test ;\
     * R0 = (0x80 >> (R0 & 7)) << (R0 & ~7 & 31) ;\
     */ ;\
    and     r2, r0, #7 ;\
    mov     r3, #0x80 ;\
    mov     r3, r3, asr r2 ;\
    and     r0, r0, #0x18 ;\
    mov     r0, r3, asl r0 ;\

#define ATOMIC_BITOP(op) \
    _BITOP(op) ;\
1: ;\
    ldrex   r2, [r1]        /* load existing value and tag memory */ ;\
    op      r3, r2, r0      /* compute new value */ ;\
    strex   ip, r3, [r1]    /* attempt to store new value */ ;\
    cmp     ip, #0          /* check if the store succeeded */ ;\
    bne     1b              /* if not, try again */ ;\
    ands    r0, r2, r0      /* mask off the bit from the old value */ ;\
    movne   r0, #1          /* if non-zero, return exactly 1 */

#if defined(_ARM_ARCH_7)
#define ATOMIC_BITOP_FB(op) \
    _BITOP(op) ;\
    dmb     ishst           /* store barrier before store exclusive */ ;\
1:  ldrex   r2, [r1]        /* load existing value and tag memory */ ;\
    op      r3, r2, r0      /* compute new value */ ;\
    strex   ip, r3, [r1]    /* attempt to store new value */ ;\
    cmp     ip, #0          /* check if the store succeeded */ ;\
    bne     1b              /* if not, try again */ ;\
    dmb     ish             /* memory barrier */ ;\
    ands    r0, r2, r0      /* mask off the bit from the old value */ ;\
    movne   r0, #1          /* if non-zero, return exactly 1 */
#endif

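/*
 * The _BITOP address/mask computation above, expressed as C for clarity
 * (illustrative only).  Bit n is the (0x80 >> (n & 7)) bit of the byte at
 * offset (n >> 3) from the base address; computing it word-at-a-time, as
 * the macro does, selects the same bit on little-endian ARM.
 *
 *   uint32_t *word = (uint32_t *)base + (n >> 5);        // R1 += 4 * (n / 32)
 *   uint32_t  mask = (0x80u >> (n & 7)) << (n & 0x18);   // 0x18 == (~7 & 31)
 *   // the previous value of the bit is (*word & mask) != 0
 */
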
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicTestAndSetBarrier, mp)
    ATOMIC_BITOP_FB(orr)
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicTestAndSetBarrier, up)
ENTRY_POINT(_OSAtomicTestAndSet)
    ATOMIC_BITOP(orr)
    bx      lr

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicTestAndClearBarrier, mp)
    ATOMIC_BITOP_FB(bic)
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicTestAndClearBarrier, up)
ENTRY_POINT(_OSAtomicTestAndClear)
    ATOMIC_BITOP(bic)
    bx      lr

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSMemoryBarrier, mp)
    dmb     ish
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_OSMemoryBarrier, up)
    bx      lr

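/*
 * Caller-side sketch (illustrative C only): OSAtomicTestAndSet returns the
 * previous value of the bit, so it can serve as a tiny one-shot flag.
 * claim_slot() and flags[] are hypothetical.
 *
 *   #include <stdbool.h>
 *   #include <stdint.h>
 *   #include <libkern/OSAtomic.h>
 *
 *   static uint8_t flags[4];             // 32 flag bits
 *
 *   bool claim_slot(uint32_t n)          // true if we were first to set bit n
 *   {
 *       return !OSAtomicTestAndSetBarrier(n, flags);
 *   }
 */
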
/* void OSAtomicEnqueue( OSQueueHead *__list, void *__new, size_t __offset); */
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicEnqueue, mp)
    dmb     ishst
1:  ldrex   r3, [r0]        // get link to 1st on list
    str     r3, [r1, r2]    // hang list off new node
    strex   r3, r1, [r0]    // make new 1st on list
    cmp     r3, #0
    bne     1b
    dmb     ish
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicEnqueue, up)
1:  ldrex   r3, [r0]        // get link to 1st on list
    str     r3, [r1, r2]    // hang list off new node
    strex   r3, r1, [r0]    // make new 1st on list
    cmp     r3, #0
    bne     1b
    bx      lr

/* void* OSAtomicDequeue( OSQueueHead *list, size_t offset); */
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicDequeue, mp)
    mov     r2, r0
    dmb     ishst
1:  ldrex   r0, [r2]        // get 1st in list
    cmp     r0, #0          // null?
    bxeq    lr              // yes, list empty
    ldr     r3, [r0, r1]    // get 2nd
    strex   ip, r3, [r2]    // make 2nd first
    cmp     ip, #0
    bne     1b
    dmb     ish
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_OSAtomicDequeue, up)
    mov     r2, r0
1:  ldrex   r0, [r2]        // get 1st in list
    cmp     r0, #0          // null?
    bxeq    lr              // yes, list empty
    ldr     r3, [r0, r1]    // get 2nd
    strex   ip, r3, [r2]    // make 2nd first
    cmp     ip, #0
    bne     1b
    bx      lr

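/*
 * Caller-side sketch of the lock-free LIFO above (illustrative C only):
 * the offset argument is the byte offset of the link field inside each
 * node.  elem_t, push() and pop() are hypothetical.
 *
 *   #include <stddef.h>
 *   #include <libkern/OSAtomic.h>
 *
 *   typedef struct elem { struct elem *link; int value; } elem_t;
 *
 *   static OSQueueHead head = OS_ATOMIC_QUEUE_INIT;
 *
 *   void push(elem_t *e) { OSAtomicEnqueue(&head, e, offsetof(elem_t, link)); }
 *   elem_t *pop(void)    { return OSAtomicDequeue(&head, offsetof(elem_t, link)); }
 */
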
#if defined(_ARM_ARCH_6K)
/* If we can use LDREXD/STREXD, then we can implement 64-bit atomic operations */

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicAdd64Barrier, mp)
    // R0,R1 contain the amount to add
    // R2 contains the pointer
    stmfd   sp!, {r4, r5, r8, r9, lr}
    dmb     ishst               // store memory barrier before store exclusive
1:  ldrexd  r4, r5, [r2]        // load existing value to R4/R5 and tag memory
    adds    r8, r4, r0          // add lower half of new value into R8 and set carry bit
    adc     r9, r5, r1          // add upper half of new value into R9 with carry
    strexd  r3, r8, r9, [r2]    // store new value if memory is still tagged
    cmp     r3, #0              // check if store succeeded
    bne     1b                  // if not, try again
    dmb     ish                 // memory barrier
    mov     r0, r8              // return new value
    mov     r1, r9
    ldmfd   sp!, {r4, r5, r8, r9, pc}
#endif

ENTRY_POINT_DEFAULT(_OSAtomicAdd64Barrier, up)
ENTRY_POINT(_OSAtomicAdd64)
    // R0,R1 contain the amount to add
    // R2 contains the pointer
    stmfd   sp!, {r4, r5, r8, r9, lr}
1:  ldrexd  r4, r5, [r2]        // load existing value to R4/R5 and tag memory
    adds    r8, r4, r0          // add lower half of new value into R8 and set carry bit
    adc     r9, r5, r1          // add upper half of new value into R9 with carry
    strexd  r3, r8, r9, [r2]    // store new value if memory is still tagged
    cmp     r3, #0              // check if store succeeded
    bne     1b                  // if not, try again
    mov     r0, r8              // return new value
    mov     r1, r9
    ldmfd   sp!, {r4, r5, r8, r9, pc}

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_OSAtomicCompareAndSwap64Barrier, mp)
    // R0,R1 contain the old value
    // R2,R3 contain the new value
    // the pointer is pushed onto the stack
    ldr     ip, [sp, #0]        // load pointer into IP
    stmfd   sp!, {r4, r5, lr}
    ldrexd  r4, [ip]            // load existing value into R4/R5 and tag memory
    teq     r0, r4              // check low word
    teqeq   r1, r5              // if low words match, check high word
    movne   r0, #0              // if either match fails, return 0
    bne     2f
    dmb     ishst               // store barrier before store exclusive
    strexd  r4, r2, [ip]        // otherwise, try to store new values
    cmp     r4, #0              // check if store succeeded
    bne     3f                  // if not, try again
1:  dmb     ish                 // memory barrier
    mov     r0, #1              // return true
2:  ldmfd   sp!, {r4, r5, pc}
3:  ldrexd  r4, [ip]            // load existing value into R4/R5 and tag memory
    teq     r0, r4              // check low word
    teqeq   r1, r5              // if low words match, check high word
    movne   r0, #0              // if either match fails, return 0
    bne     2b
    strexd  r4, r2, [ip]        // otherwise, try to store new values
    cmp     r4, #0              // check if store succeeded
    bne     3b                  // if not, try again
    b       1b                  // return
#endif

ENTRY_POINT_DEFAULT(_OSAtomicCompareAndSwap64Barrier, up)
ENTRY_POINT(_OSAtomicCompareAndSwap64)
    // R0,R1 contain the old value
    // R2,R3 contain the new value
    // the pointer is pushed onto the stack
    ldr     ip, [sp, #0]        // load pointer into IP
    stmfd   sp!, {r4, r5, lr}
1:  ldrexd  r4, [ip]            // load existing value into R4/R5 and tag memory
    teq     r0, r4              // check low word
    teqeq   r1, r5              // if low words match, check high word
    movne   r0, #0              // if either match fails, return 0
    bne     2f
    strexd  r4, r2, [ip]        // otherwise, try to store new values
    cmp     r4, #0              // check if store succeeded
    bne     1b                  // if not, try again
    mov     r0, #1              // return true
2:  ldmfd   sp!, {r4, r5, pc}

#endif /* defined(_ARM_ARCH_6K) */
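
/*
 * Caller-side sketch for the 64-bit variants above (illustrative C only):
 * a shared 64-bit counter updated from multiple threads.  Requires an
 * LDREXD/STREXD-capable part (_ARM_ARCH_6K and later) and a naturally
 * aligned counter.  bytes_processed and account() are hypothetical.
 *
 *   #include <stdint.h>
 *   #include <libkern/OSAtomic.h>
 *
 *   static volatile int64_t bytes_processed;
 *
 *   void account(int64_t n)
 *   {
 *       int64_t total = OSAtomicAdd64Barrier(n, &bytes_processed);
 *       (void)total;                 // new value after the add
 *   }
 */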

#endif /* defined(_ARM_ARCH_6) */

/*
 * void
 * _spin_lock(p)
 *      int *p;
 *
 * Lock the lock pointed to by p.  Spin (possibly forever) until the lock
 * is available.
 */

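/*
 * Caller-side sketch for the spin lock entry points below (illustrative C
 * only).  An OSSpinLock is a 32-bit word initialized to 0; the lock routine
 * spins, then yields via thread_switch, until it acquires the lock.
 * bump() and shared_count are hypothetical.
 *
 *   #include <libkern/OSAtomic.h>
 *
 *   static OSSpinLock lock = OS_SPINLOCK_INIT;
 *   static int shared_count;
 *
 *   void bump(void)
 *   {
 *       OSSpinLockLock(&lock);
 *       shared_count++;              // critical section
 *       OSSpinLockUnlock(&lock);
 *   }
 */
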
#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_spin_lock, mp)
ENTRY_POINT_RESOLVER(__spin_lock, mp)
ENTRY_POINT_RESOLVER(_OSSpinLockLock, mp)
    mov     r1, #1
1:  ldrex   r2, [r0]                // load the value of [r0] into r2
    cmp     r2, #0                  // compare the lock value to zero
    bne     2f                      // jump to the spin if we don't own the lock
    strex   r3, r1, [r0]            // try to store the one
    cmp     r3, #0                  // test to see if we stored our value
    bne     2f                      // if not, jump to the spin too
    dmb     ish                     // memory barrier if we acquired the lock
    bx      lr                      // and return
2:  mov     r3, $(MP_SPIN_TRIES)    // load up r3 with spin counter
3:  ldr     r2, [r0]                // load the lock
    cmp     r2, #0                  // if unlocked
    beq     1b                      // then go back to the top
    subs    r3, r3, #1              // counter--
    bne     3b                      // if nonzero, back to 3:

    mov     r3, r0                  // r0 is clobbered by the syscall return value
    mov     r0, #0                  // THREAD_NULL
                                    // SWITCH_OPTION_DEPRESS (r1==1 already)
    mov     r2, #1                  // timeout (ms)
    mov     r12, #-61               // SYSCALL_THREAD_SWITCH
    swi     0x80
    mov     r0, r3                  // restore state of r0
    b       1b

#if !defined(VARIANT_DYLD)
/*
 * This is unfortunate from a code-sharing point of view: the only difference
 * in this version is the presence of a WFE instruction in the spin loop.  It
 * is only used on CPUs which get woken up regularly out of WFE waits.
 *
 * Additionally, it is compiled out of the dyld variant entirely so we can
 * easily use macros to pick the normal MP version for dyld on armv7 platforms.
 */
ENTRY_POINT_RESOLVER(_spin_lock, wfe)
ENTRY_POINT_RESOLVER(__spin_lock, wfe)
ENTRY_POINT_RESOLVER(_OSSpinLockLock, wfe)
    mov     r1, #1
1:  ldrex   r2, [r0]                // load the value of [r0] into r2
    cmp     r2, #0                  // compare the lock value to zero
    bne     2f                      // jump to the spin if we don't own the lock
    strex   r3, r1, [r0]            // try to store the one
    cmp     r3, #0                  // test to see if we stored our value
    bne     2f                      // if not, jump to the spin too
    dmb     ish                     // memory barrier if we acquired the lock
    bx      lr                      // and return
2:  mov     r3, $(MP_SPIN_TRIES_WFE)    // load up r3 with spin counter
3:  wfe                             // sleepy time
    ldr     r2, [r0]                // load the lock
    cmp     r2, #0                  // if unlocked
    beq     1b                      // then go back to the top
    subs    r3, r3, #1              // counter--
    bne     3b                      // if nonzero, back to 3:

    mov     r3, r0                  // r0 is clobbered by the syscall return value
    mov     r0, #0                  // THREAD_NULL
                                    // SWITCH_OPTION_DEPRESS (r1==1 already)
    mov     r2, #1                  // timeout (ms)
    mov     r12, #-61               // SYSCALL_THREAD_SWITCH
    swi     0x80
    mov     r0, r3                  // restore state of r0
    b       1b
#endif // !VARIANT_DYLD
#endif // _ARM_ARCH_7

ENTRY_POINT_DEFAULT(_spin_lock, up)
ENTRY_POINT_DEFAULT(__spin_lock, up)
ENTRY_POINT_DEFAULT(_OSSpinLockLock, up)
    mov     r1, #1
1:
#if !defined(_ARM_ARCH_7)
    swp     r2, r1, [r0]
    cmp     r2, #0
#else
    ldrex   r2, [r0]                // load the value of [r0] into r2
    cmp     r2, #0                  // compare the lock value to zero
    bne     2f                      // jump to the spin if we don't own the lock
    strex   r3, r1, [r0]            // try to store the one
    cmp     r3, #0                  // test to see if we stored our value
#endif // !_ARM_ARCH_7
    bxeq    lr                      // if so, return
2:  mov     r3, r0                  // r0 is clobbered by the syscall return value
    mov     r0, #0                  // THREAD_NULL
                                    // SWITCH_OPTION_DEPRESS (r1==1 already)
    mov     r2, #1                  // timeout (ms)
    mov     r12, #-61               // SYSCALL_THREAD_SWITCH
    swi     0x80
    mov     r0, r3                  // restore state of r0
    b       1b

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_spin_lock_try, mp)
ENTRY_POINT_RESOLVER(__spin_lock_try, mp)
ENTRY_POINT_RESOLVER(_OSSpinLockTry, mp)
    mov     r1, #1
1:  ldrex   r2, [r0]
    strex   r3, r1, [r0]
    cmp     r3, #0
    bne     1b
    dmb     ish
    bic     r0, r1, r2
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_spin_lock_try, up)
ENTRY_POINT_DEFAULT(__spin_lock_try, up)
ENTRY_POINT_DEFAULT(_OSSpinLockTry, up)
    mov     r1, #1
#if !defined(_ARM_ARCH_7)
    swp     r2, r1, [r0]
#else
1:  ldrex   r2, [r0]
    strex   r3, r1, [r0]
    cmp     r3, #0
    bne     1b
#endif // !_ARM_ARCH_7
    bic     r0, r1, r2
    bx      lr

/*
 * void
 * _spin_unlock(p)
 *      int *p;
 *
 * Unlock the lock pointed to by p.
 */

#if defined(_ARM_ARCH_7)
ENTRY_POINT_RESOLVER(_spin_unlock, mp)
ENTRY_POINT_RESOLVER(__spin_unlock, mp)
ENTRY_POINT_RESOLVER(_OSSpinLockUnlock, mp)
    mov     r1, #0
    dmb     ish                     // barrier so that previous accesses are observed before unlock
1:  ldrex   r2, [r0]                // load the lock to get exclusive access
    strex   r3, r1, [r0]            // strex is instantly visible to (at least) {st,ld}rex
    cmp     r3, #0                  // did the unlock succeed?
    bne     1b                      // if not, try try again.
    bx      lr
#endif

ENTRY_POINT_DEFAULT(_spin_unlock, up)
ENTRY_POINT_DEFAULT(__spin_unlock, up)
ENTRY_POINT_DEFAULT(_OSSpinLockUnlock, up)
    mov     r1, #0
#if !defined(_ARM_ARCH_7)
    str     r1, [r0]
#else
1:  ldrex   r2, [r0]                // load the lock to get exclusive access
    strex   r3, r1, [r0]            // store zero to the lock
    cmp     r3, #0                  // did the unlock succeed?
    bne     1b                      // if not, try try again.
#endif // !_ARM_ARCH_7
    bx      lr