/*
 * Copyright (c) 2004 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
#include <architecture/arm/asm_help.h>
#include <machine/cpu_capabilities.h>
/*
 * Use LDREX/STREX to perform atomic operations.
 * Memory barriers are not needed on a UP (uniprocessor) system.
 */
#if defined(_ARM_ARCH_6)

/* Implement a generic atomic arithmetic operation:
 * operand is in R0, pointer is in R1.  Return new
 * value in R0.  Clobbers R2, R3, IP and flags.
 */
#define ATOMIC_ARITHMETIC(op) \
1:	ldrex	r2, [r1]	/* load existing value and tag memory */ ;\
	op	r3, r2, r0	/* compute new value */ ;\
	strex	ip, r3, [r1]	/* store new value if memory is still tagged */ ;\
	cmp	ip, #0		/* check if the store succeeded */ ;\
	bne	1b		/* if not, try again */ ;\
	mov	r0, r3		/* return new value */
/* Same as ATOMIC_ARITHMETIC, but return the ORIGINAL (pre-operation)
 * value in R0.  Clobbers R2, R3, IP and flags.
 */
#define ATOMIC_ARITHMETIC_ORIG(op) \
1:	ldrex	r2, [r1]	/* load existing value and tag memory */ ;\
	op	r3, r2, r0	/* compute new value */ ;\
	strex	ip, r3, [r1]	/* store new value if memory is still tagged */ ;\
	cmp	ip, #0		/* check if the store succeeded */ ;\
	bne	1b		/* if not, try again */ ;\
	mov	r0, r2		/* return orig value */
// int32_t OSAtomicAdd32(int32_t amount, volatile int32_t *address);
// Barrier variant aliases the same code (barriers unneeded on UP).
ENTRY_POINT(_OSAtomicAdd32Barrier)
ENTRY_POINT(_OSAtomicAdd32)
	ATOMIC_ARITHMETIC(add)
	bx	lr
// int32_t OSAtomicOr32(uint32_t mask, volatile uint32_t *address);
// Returns the NEW value.
ENTRY_POINT(_OSAtomicOr32Barrier)
ENTRY_POINT(_OSAtomicOr32)
	ATOMIC_ARITHMETIC(orr)
	bx	lr
// int32_t OSAtomicOr32Orig(uint32_t mask, volatile uint32_t *address);
// Returns the ORIGINAL value.
ENTRY_POINT(_OSAtomicOr32OrigBarrier)
ENTRY_POINT(_OSAtomicOr32Orig)
	ATOMIC_ARITHMETIC_ORIG(orr)
	bx	lr
// int32_t OSAtomicAnd32(uint32_t mask, volatile uint32_t *address);
// Returns the NEW value.
ENTRY_POINT(_OSAtomicAnd32Barrier)
ENTRY_POINT(_OSAtomicAnd32)
	ATOMIC_ARITHMETIC(and)
	bx	lr
// int32_t OSAtomicAnd32Orig(uint32_t mask, volatile uint32_t *address);
// Returns the ORIGINAL value.
ENTRY_POINT(_OSAtomicAnd32OrigBarrier)
ENTRY_POINT(_OSAtomicAnd32Orig)
	ATOMIC_ARITHMETIC_ORIG(and)
	bx	lr
// int32_t OSAtomicXor32(uint32_t mask, volatile uint32_t *address);
// Returns the NEW value.
ENTRY_POINT(_OSAtomicXor32Barrier)
ENTRY_POINT(_OSAtomicXor32)
	ATOMIC_ARITHMETIC(eor)
	bx	lr
// int32_t OSAtomicXor32Orig(uint32_t mask, volatile uint32_t *address);
// Returns the ORIGINAL value.
ENTRY_POINT(_OSAtomicXor32OrigBarrier)
ENTRY_POINT(_OSAtomicXor32Orig)
	ATOMIC_ARITHMETIC_ORIG(eor)
	bx	lr
// bool OSAtomicCompareAndSwap32(int32_t old, int32_t new, volatile int32_t *addr);
// In:  R0 = oldValue, R1 = newValue, R2 = pointer
// Out: R0 = 1 if the swap was performed, 0 if *addr != oldValue
// Clobbers R3 and flags.  Int/Long/Ptr variants alias the same code
// (all are 32-bit on this architecture); Barrier variants too (UP system).
ENTRY_POINT(_OSAtomicCompareAndSwap32Barrier)
ENTRY_POINT(_OSAtomicCompareAndSwap32)
ENTRY_POINT(_OSAtomicCompareAndSwapIntBarrier)
ENTRY_POINT(_OSAtomicCompareAndSwapInt)
ENTRY_POINT(_OSAtomicCompareAndSwapLongBarrier)
ENTRY_POINT(_OSAtomicCompareAndSwapLong)
ENTRY_POINT(_OSAtomicCompareAndSwapPtrBarrier)
ENTRY_POINT(_OSAtomicCompareAndSwapPtr)
1:	ldrex	r3, [r2]	// load existing value and tag memory
	teq	r3, r0		// is it the same as oldValue?
	movne	r0, #0		// if not, return 0 immediately...
	bxne	lr		// ...without attempting the store
	strex	r3, r1, [r2]	// otherwise, try to store new value
	cmp	r3, #0		// check if the store succeeded
	bne	1b		// if not, try again
	mov	r0, #1		// return true
	bx	lr
/* Implement a generic test-and-bit-op operation:
 * bit to set is in R0, base address is in R1. Return
 * previous value (0 or 1) of the bit in R0.
 * Clobbers R2, R3, IP and flags.
 * Bit numbering follows OSAtomicTestAndSet(): bit n is
 * (0x80 >> (n & 7)) of byte ((char *)address + (n >> 3)).
 */
#define ATOMIC_BITOP(op) \
	/* Adjust pointer to point at the correct word ;\
	 * R1 = R1 + 4 * (R0 / 32) ;\
	 */ ;\
	mov	r3, r0, lsr #5 ;\
	add	r1, r1, r3, asl #2 ;\
	/* Generate a bit mask for the bit we want to test ;\
	 * R0 = (0x80 >> (R0 & 7)) << (R0 & ~7 & 31) ;\
	 */ ;\
	and	r2, r0, #7	/* r2 = bit position within the byte */ ;\
	mov	r3, #0x80 ;\
	mov	r3, r3, asr r2	/* r3 = in-byte mask (0x80 >> (R0 & 7)) */ ;\
	and	r0, r0, #0x18	/* r0 = byte offset within the word, in bits */ ;\
	mov	r0, r3, asl r0	/* r0 = final single-bit mask */ ;\
1:	ldrex	r2, [r1]	/* load existing value and tag memory */ ;\
	op	r3, r2, r0	/* compute new value */ ;\
	strex	ip, r3, [r1]	/* attempt to store new value */ ;\
	cmp	ip, #0		/* check if the store succeeded */ ;\
	bne	1b		/* if not, try again */ ;\
	ands	r0, r2, r0	/* mask off the bit from the old value */ ;\
	movne	r0, #1		/* if non-zero, return exactly 1 */
// bool OSAtomicTestAndSet(uint32_t n, volatile void *address);
// Sets the bit and returns its previous value (0 or 1) in R0.
ENTRY_POINT(_OSAtomicTestAndSetBarrier)
ENTRY_POINT(_OSAtomicTestAndSet)
	ATOMIC_BITOP(orr)
	bx	lr
// bool OSAtomicTestAndClear(uint32_t n, volatile void *address);
// Clears the bit and returns its previous value (0 or 1) in R0.
ENTRY_POINT(_OSAtomicTestAndClearBarrier)
ENTRY_POINT(_OSAtomicTestAndClear)
	ATOMIC_BITOP(bic)
	bx	lr
// void OSMemoryBarrier(void);
// No-op: memory barriers are not needed on a UP system (see note at top).
ENTRY_POINT(_OSMemoryBarrier)
	bx	lr
#if defined(_ARM_ARCH_6K)
/* If we can use LDREXD/STREXD, then we can implement 64-bit atomic operations */

// int64_t OSAtomicAdd64(int64_t amount, volatile int64_t *address);
ENTRY_POINT(_OSAtomicAdd64Barrier)
ENTRY_POINT(_OSAtomicAdd64)
	// R0,R1 contain the amount to add
	// R2 contains the pointer
	stmfd	sp!, {r4, r5, r8, r9, lr}
1:
	ldrexd	r4, r5, [r2]	// load existing value to R4/R5 and tag memory
	adds	r8, r4, r0	// add lower half of new value into R8 and set carry bit
	adc	r9, r5, r1	// add upper half of new value into R9 with carry
	strexd	r3, r8, r9, [r2] // store new value if memory is still tagged
	cmp	r3, #0		// check if store succeeded
	bne	1b		// if not, try again
	mov	r0, r8		// return new value: low half in R0...
	mov	r1, r9		// ...high half in R1 (AAPCS 64-bit return)
	ldmfd	sp!, {r4, r5, r8, r9, pc}
// bool OSAtomicCompareAndSwap64(int64_t old, int64_t new, volatile int64_t *addr);
// Returns 1 in R0 if the swap was performed, 0 otherwise.
ENTRY_POINT(_OSAtomicCompareAndSwap64Barrier)
ENTRY_POINT(_OSAtomicCompareAndSwap64)
	// R0,R1 contains the old value
	// R2,R3 contains the new value
	// the pointer is pushed onto the stack
	ldr	ip, [sp, #0]	// load pointer into IP
	stmfd	sp!, {r4, r5, lr}
1:
	ldrexd	r4, r5, [ip]	// load existing value into R4/R5 and tag memory
	teq	r0, r4		// check low word
	teqeq	r1, r5		// if low words match, check high word
	movne	r0, #0		// if either match fails, return 0...
	bne	2f		// ...without attempting the store
	strexd	r4, r2, r3, [ip] // otherwise, try to store new values; status in R4
	cmp	r4, #0		// check if store succeeded
	bne	1b		// if not, try again
	mov	r0, #1		// return true
2:
	ldmfd	sp!, {r4, r5, pc}

#endif /* defined(_ARM_ARCH_6K) */

#endif /* defined(_ARM_ARCH_6) */
 * Lock the lock pointed to by p.  Spin (possibly forever) until the next
// NOTE(review): extraction gaps — the lines between the entry points and the
// thread_switch setup below (presumably the LDREX/STREX acquisition loop), and
// the lines after it (presumably the trap and a retry branch), are missing from
// this view.  Recover them from the upstream source before assembling.
203 ENTRY_POINT(_spin_lock)
204 ENTRY_POINT(__spin_lock)
205 ENTRY_POINT(_OSSpinLockLock)
// Lock appears to be contended here: set up a SYSCALL_THREAD_SWITCH trap to
// yield the CPU with a 1 ms priority-depress timeout before retrying —
// TODO confirm against the missing surrounding code.
213 mov r0, #0 // THREAD_NULL
214 mov r1, #1 // SWITCH_OPTION_DEPRESS
215 mov r2, #1 // timeout (ms)
216 mov r12, #-61 // SYSCALL_THREAD_SWITCH
// NOTE(review): the entire body of this function (presumably a single
// LDREX/STREX attempt returning nonzero on success) is missing from this
// view — recover it from the upstream source before assembling.
221 ENTRY_POINT(_spin_lock_try)
222 ENTRY_POINT(__spin_lock_try)
223 ENTRY_POINT(_OSSpinLockTry)
 * Unlock the lock pointed to by p.
236 ENTRY_POINT(_spin_unlock)
237 ENTRY_POINT(__spin_unlock)
238 ENTRY_POINT(_OSSpinLockUnlock)