/* arm/sys/OSAtomic.s, from Apple Libc-498.1.7 (apple/libc.git at git.saurik.com) */
/*
 * Copyright (c) 2004 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

#include <machine/cpu_capabilities.h>
#include "SYS.h"
#include <arm/arch.h>

.text

/*
 * Use LDREX/STREX to perform atomic operations.
 * Memory barriers are not needed on a uniprocessor (UP) system.
 */

#if defined(_ARM_ARCH_6)

/* Implement a generic atomic arithmetic operation:
 * operand is in R0, pointer is in R1. Return new
 * value in R0
 */
#define ATOMIC_ARITHMETIC(op) \
1:      ldrex   r2, [r1]        /* load existing value and tag memory */ ;\
        op      r3, r2, r0      /* compute new value */ ;\
        strex   r2, r3, [r1]    /* store new value if memory is still tagged */ ;\
        cmp     r2, #0          /* check if the store succeeded */ ;\
        bne     1b              /* if not, try again */ ;\
        mov     r0, r3          /* return new value */

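/*
 * For reference, the LDREX/STREX retry loop that ATOMIC_ARITHMETIC expands to
 * behaves roughly like the C below. This is an illustrative sketch only (the
 * helper name is invented and the C is not part of the build); it models the
 * "add" case, i.e. OSAtomicAdd32, using the GCC/clang __sync builtin in place
 * of the explicit load-exclusive/store-exclusive pair:
 *
 *      #include <stdint.h>
 *
 *      static int32_t atomic_add32_sketch(int32_t amount, volatile int32_t *ptr)
 *      {
 *              int32_t oldval, newval;
 *              do {
 *                      oldval = *ptr;              // corresponds to LDREX
 *                      newval = oldval + amount;   // "op r3, r2, r0" with op = add
 *              } while (!__sync_bool_compare_and_swap(ptr, oldval, newval)); // STREX + retry
 *              return newval;                      // new value returned, as in R0
 *      }
 */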
MI_ENTRY_POINT(_OSAtomicAdd32Barrier)
MI_ENTRY_POINT(_OSAtomicAdd32)
        ATOMIC_ARITHMETIC(add)
        bx      lr

MI_ENTRY_POINT(_OSAtomicOr32Barrier)
MI_ENTRY_POINT(_OSAtomicOr32)
        ATOMIC_ARITHMETIC(orr)
        bx      lr

MI_ENTRY_POINT(_OSAtomicAnd32Barrier)
MI_ENTRY_POINT(_OSAtomicAnd32)
        ATOMIC_ARITHMETIC(and)
        bx      lr

MI_ENTRY_POINT(_OSAtomicXor32Barrier)
MI_ENTRY_POINT(_OSAtomicXor32)
        ATOMIC_ARITHMETIC(eor)
        bx      lr

MI_ENTRY_POINT(_OSAtomicCompareAndSwap32Barrier)
MI_ENTRY_POINT(_OSAtomicCompareAndSwap32)
MI_ENTRY_POINT(_OSAtomicCompareAndSwapIntBarrier)
MI_ENTRY_POINT(_OSAtomicCompareAndSwapInt)
MI_ENTRY_POINT(_OSAtomicCompareAndSwapLongBarrier)
MI_ENTRY_POINT(_OSAtomicCompareAndSwapLong)
MI_ENTRY_POINT(_OSAtomicCompareAndSwapPtrBarrier)
MI_ENTRY_POINT(_OSAtomicCompareAndSwapPtr)
1:      ldrex   r3, [r2]        // load existing value and tag memory
        teq     r3, r0          // is it the same as oldValue?
        movne   r0, #0          // if not, return 0 immediately
        bxne    lr
        strex   r3, r1, [r2]    // otherwise, try to store new value
        cmp     r3, #0          // check if the store succeeded
        bne     1b              // if not, try again
        mov     r0, #1          // return true
        bx      lr

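/*
 * Usage note: the compare-and-swap entry points above return nonzero only if
 * the store actually happened, so callers normally wrap them in a retry loop.
 * A small illustrative C example follows; atomic_update_max() is an invented
 * helper name, not a libc function:
 *
 *      #include <stdbool.h>
 *      #include <stdint.h>
 *      #include <libkern/OSAtomic.h>
 *
 *      // Atomically raise *ptr to at least 'candidate' using CAS retries.
 *      static void atomic_update_max(int32_t candidate, volatile int32_t *ptr)
 *      {
 *              int32_t seen;
 *              do {
 *                      seen = *ptr;            // snapshot the current value
 *                      if (candidate <= seen)
 *                              return;         // nothing to do
 *              } while (!OSAtomicCompareAndSwap32(seen, candidate, ptr));
 *      }
 */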

/* Implement a generic test-and-bit-op operation:
 * the bit number is in R0, the base address is in R1. Return the
 * previous value (0 or 1) of the bit in R0.
 */
#define ATOMIC_BITOP(op) \
        /* Adjust pointer to point at the correct word ;\
         * R1 = R1 + 4 * (R0 / 32) ;\
         */ ;\
        mov     r3, r0, lsr #5 ;\
        add     r1, r1, r3, asl #2 ;\
        /* Generate a bit mask for the bit we want to test ;\
         * R0 = (0x80 >> (R0 & 7)) << (R0 & ~7 & 31) ;\
         */ ;\
        and     r2, r0, #7 ;\
        mov     r3, #0x80 ;\
        mov     r3, r3, asr r2 ;\
        and     r0, r0, #0x18 ;\
        mov     r0, r3, asl r0 ;\
1: ;\
        ldrex   r2, [r1]        /* load existing value and tag memory */ ;\
        op      r3, r2, r0      /* compute new value */ ;\
        strex   ip, r3, [r1]    /* attempt to store new value */ ;\
        cmp     ip, #0          /* check if the store succeeded */ ;\
        bne     1b              /* if not, try again */ ;\
        ands    r0, r2, r0      /* mask off the bit from the old value */ ;\
        movne   r0, #1          /* if non-zero, return exactly 1 */

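/*
 * The pointer and mask arithmetic in ATOMIC_BITOP is easier to read in C.
 * This sketch simply restates the two commented formulas (word index R0 / 32,
 * mask (0x80 >> (R0 & 7)) << (R0 & ~7 & 31)); the helper name is invented for
 * illustration and is not part of the build:
 *
 *      #include <stdint.h>
 *
 *      // Locate the word and single-bit mask that the test-and-bit-op
 *      // routines use for bit number n of the bit string at 'base'.
 *      static volatile uint32_t *bitop_word(uint32_t n, volatile void *base,
 *                                           uint32_t *mask_out)
 *      {
 *              volatile uint32_t *word = (volatile uint32_t *)base + (n >> 5);
 *              *mask_out = (uint32_t)(0x80u >> (n & 7)) << (n & ~7u & 31);
 *              return word;
 *      }
 */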
MI_ENTRY_POINT(_OSAtomicTestAndSetBarrier)
MI_ENTRY_POINT(_OSAtomicTestAndSet)
        ATOMIC_BITOP(orr)
        bx      lr

MI_ENTRY_POINT(_OSAtomicTestAndClearBarrier)
MI_ENTRY_POINT(_OSAtomicTestAndClear)
        ATOMIC_BITOP(bic)
        bx      lr

MI_ENTRY_POINT(_OSMemoryBarrier)
        bx      lr              // no-op: barriers are not needed on a UP system


#if defined(_ARM_ARCH_6K)
/* If we can use LDREXD/STREXD, then we can implement 64-bit atomic operations */

MI_ENTRY_POINT(_OSAtomicAdd64)
        // R0,R1 contain the amount to add
        // R2 contains the pointer
        stmfd   sp!, {r4, r5, r6, r8, lr}
1:
        ldrexd  r4, [r2]        // load existing value to R4/R5 and tag memory
        adds    r6, r4, r0      // add lower half of new value into R6, setting the carry flag
        adc     r8, r5, r1      // add upper half of new value into R8 with carry
        strexd  r3, r6, [r2]    // store new value if memory is still tagged
        cmp     r3, #0          // check if the store succeeded
        bne     1b              // if not, try again
        mov     r0, r6          // return new value
        mov     r1, r8
        ldmfd   sp!, {r4, r5, r6, r8, pc}

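/*
 * The adds/adc pair above is a 64-bit add split across two 32-bit registers:
 * adds produces the low word and sets the carry flag, and adc folds that
 * carry into the high word. An illustrative C model (helper name invented,
 * not part of the build; the real routine additionally wraps this in the
 * LDREXD/STREXD retry loop):
 *
 *      #include <stdint.h>
 *
 *      static uint64_t add64_by_halves(uint32_t old_lo, uint32_t old_hi,
 *                                      uint32_t amt_lo, uint32_t amt_hi)
 *      {
 *              uint32_t lo = old_lo + amt_lo;          // "adds r6, r4, r0"
 *              uint32_t carry = (lo < old_lo) ? 1 : 0; // carry out of the low add
 *              uint32_t hi = old_hi + amt_hi + carry;  // "adc r8, r5, r1"
 *              return ((uint64_t)hi << 32) | lo;
 *      }
 */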
MI_ENTRY_POINT(_OSAtomicCompareAndSwap64)
        // R0,R1 contain the old value
        // R2,R3 contain the new value
        // the pointer is pushed onto the stack
        ldr     ip, [sp, #0]    // load pointer into IP
        stmfd   sp!, {r4, r5, lr}
1:
        ldrexd  r4, [ip]        // load existing value into R4/R5 and tag memory
        teq     r0, r4          // check low word
        teqeq   r1, r5          // if low words match, check high word
        movne   r0, #0          // if either match fails, return 0
        bne     2f
        strexd  r4, r2, [ip]    // otherwise, try to store new values; status goes to R4
        cmp     r4, #0          // check if the store succeeded
        bne     1b              // if not, try again
        mov     r0, #1          // return true
2:
        ldmfd   sp!, {r4, r5, pc}

#endif /* defined(_ARM_ARCH_6K) */

#endif /* defined(_ARM_ARCH_6) */

/*
 * void
 * _spin_lock(p)
 *      int *p;
 *
 * Lock the lock pointed to by p. Spin (possibly forever) until the lock
 * is available.
 */
MI_ENTRY_POINT(_spin_lock)
MI_ENTRY_POINT(__spin_lock)
MI_ENTRY_POINT(_OSSpinLockLock)
L_spin_lock_loop:
        mov     r1, #1
        swp     r2, r1, [r0]    // atomically swap 1 into the lock; old value in r2
        cmp     r2, #0
        bxeq    lr              // if it was unlocked, we now own it
        mov     ip, sp
        stmfd   sp!, {r0, r8}   // preserve the lock pointer and r8 across the trap
        mov     r0, #0          // THREAD_NULL
        mov     r1, #1          // SWITCH_OPTION_DEPRESS
        mov     r2, #1          // timeout (ms)
        mov     r12, #-61       // SYSCALL_THREAD_SWITCH
        swi     0x80            // yield briefly, then try again
        ldmfd   sp!, {r0, r8}
        b       L_spin_lock_loop

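/*
 * The locking strategy above is: grab the lock with an atomic swap, and if it
 * is already held, issue the thread_switch trap with a roughly 1 ms priority
 * depression before retrying. A rough C model using C11 atomics and
 * sched_yield() as a stand-in for the Mach trap (names invented for this
 * sketch; the assembly above is the real implementation):
 *
 *      #include <stdatomic.h>
 *      #include <sched.h>
 *
 *      static void spin_lock_sketch(atomic_int *lock)
 *      {
 *              for (;;) {
 *                      // swap 1 in; if the previous value was 0 the lock is ours
 *                      if (atomic_exchange(lock, 1) == 0)
 *                              return;
 *                      // lock was held: briefly give up the CPU, then retry
 *                      sched_yield();
 *              }
 *      }
 */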
MI_ENTRY_POINT(_spin_lock_try)
MI_ENTRY_POINT(__spin_lock_try)
MI_ENTRY_POINT(_OSSpinLockTry)
        mov     r1, #1
        swp     r2, r1, [r0]    // atomically swap 1 into the lock; old value in r2
        bic     r0, r1, r2      // return 1 & ~old: 1 if we acquired the lock, else 0
        bx      lr

/*
 * void
 * _spin_unlock(p)
 *      int *p;
 *
 * Unlock the lock pointed to by p.
 */
MI_ENTRY_POINT(_spin_unlock)
MI_ENTRY_POINT(__spin_unlock)
MI_ENTRY_POINT(_OSSpinLockUnlock)
        mov     r1, #0
        str     r1, [r0]        // clear the lock word to release the lock
        bx      lr
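/*
 * From C, these spinlock entry points are reached through the
 * <libkern/OSAtomic.h> API. A minimal usage sketch (the counter and function
 * names are invented for the example):
 *
 *      #include <libkern/OSAtomic.h>
 *
 *      static OSSpinLock g_lock = OS_SPINLOCK_INIT;
 *      static int g_counter;
 *
 *      static void bump_counter(void)
 *      {
 *              OSSpinLockLock(&g_lock);        // spins via _OSSpinLockLock above
 *              g_counter++;                    // protected region
 *              OSSpinLockUnlock(&g_lock);      // stores 0 to release
 *      }
 */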