/*
 * Copyright (c) 2013 Apple Inc. All rights reserved.
 *
 * @APPLE_APACHE_LICENSE_HEADER_START@
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * @APPLE_APACHE_LICENSE_HEADER_END@
 */
#include "os/internal.h"
#include "libkern/OSAtomic.h"

#if TARGET_OS_EMBEDDED
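
/*
 * Legacy <libkern/OSAtomic.h> entry points, implemented below on top of
 * the os_atomic_* macros pulled in via "os/internal.h".  The plain
 * variants use relaxed memory ordering; the *Barrier variants use seq_cst.
 */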
int32_t OSAtomicAdd32(int32_t v, volatile int32_t *p);
int32_t OSAtomicAdd32Barrier(int32_t v, volatile int32_t *p);
int32_t OSAtomicIncrement32(volatile int32_t *p);
int32_t OSAtomicIncrement32Barrier(volatile int32_t *p);
int32_t OSAtomicDecrement32(volatile int32_t *p);
int32_t OSAtomicDecrement32Barrier(volatile int32_t *p);
int64_t OSAtomicAdd64(int64_t v, volatile int64_t *p);
int64_t OSAtomicAdd64Barrier(int64_t v, volatile int64_t *p);
int64_t OSAtomicIncrement64(volatile int64_t *p);
int64_t OSAtomicIncrement64Barrier(volatile int64_t *p);
int64_t OSAtomicDecrement64(volatile int64_t *p);
int64_t OSAtomicDecrement64Barrier(volatile int64_t *p);
int32_t OSAtomicAnd32(uint32_t v, volatile uint32_t *p);
int32_t OSAtomicAnd32Barrier(uint32_t v, volatile uint32_t *p);
int32_t OSAtomicAnd32Orig(uint32_t v, volatile uint32_t *p);
int32_t OSAtomicAnd32OrigBarrier(uint32_t v, volatile uint32_t *p);
int32_t OSAtomicOr32(uint32_t v, volatile uint32_t *p);
int32_t OSAtomicOr32Barrier(uint32_t v, volatile uint32_t *p);
int32_t OSAtomicOr32Orig(uint32_t v, volatile uint32_t *p);
int32_t OSAtomicOr32OrigBarrier(uint32_t v, volatile uint32_t *p);
int32_t OSAtomicXor32(uint32_t v, volatile uint32_t *p);
int32_t OSAtomicXor32Barrier(uint32_t v, volatile uint32_t *p);
int32_t OSAtomicXor32Orig(uint32_t v, volatile uint32_t *p);
int32_t OSAtomicXor32OrigBarrier(uint32_t v, volatile uint32_t *p);
bool OSAtomicCompareAndSwap32(int32_t o, int32_t n, volatile int32_t *p);
bool OSAtomicCompareAndSwap32Barrier(int32_t o, int32_t n, volatile int32_t *p);
bool OSAtomicCompareAndSwapPtr(void *o, void *n, void * volatile *p);
bool OSAtomicCompareAndSwapPtrBarrier(void *o, void *n, void * volatile *p);
bool OSAtomicCompareAndSwapInt(int o, int n, volatile int *p);
bool OSAtomicCompareAndSwapIntBarrier(int o, int n, volatile int *p);
bool OSAtomicCompareAndSwapLong(long o, long n, volatile long *p);
bool OSAtomicCompareAndSwapLongBarrier(long o, long n, volatile long *p);
bool OSAtomicCompareAndSwap64(int64_t o, int64_t n, volatile int64_t *p);
bool OSAtomicCompareAndSwap64Barrier(int64_t o, int64_t n, volatile int64_t *p);
bool OSAtomicTestAndSet(uint32_t n, volatile void *p);
bool OSAtomicTestAndSetBarrier(uint32_t n, volatile void *p);
bool OSAtomicTestAndClear(uint32_t n, volatile void *p);
bool OSAtomicTestAndClearBarrier(uint32_t n, volatile void *p);
void OSAtomicEnqueue(OSQueueHead *list, void *new, size_t offset);
void* OSAtomicDequeue(OSQueueHead *list, size_t offset);
void OSMemoryBarrier(void);
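
/*
 * Add/increment/decrement: each shim performs the operation with the
 * indicated memory ordering and returns the new value stored at *p.
 * Illustrative caller-side usage (not part of this file):
 *
 *	volatile int32_t count = 0;
 *	int32_t after = OSAtomicIncrement32Barrier(&count);	// after == 1
 */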
int32_t
OSAtomicAdd32(int32_t v, volatile int32_t *p)
{
	int32_t r = os_atomic_add(p, v, relaxed);
	return r;
}

int32_t
OSAtomicAdd32Barrier(int32_t v, volatile int32_t *p)
{
	int32_t r = os_atomic_add(p, v, seq_cst);
	return r;
}

int32_t
OSAtomicIncrement32(volatile int32_t *p)
{
	int32_t r = os_atomic_add(p, 1, relaxed);
	return r;
}

int32_t
OSAtomicIncrement32Barrier(volatile int32_t *p)
{
	int32_t r = os_atomic_add(p, 1, seq_cst);
	return r;
}

int32_t
OSAtomicDecrement32(volatile int32_t *p)
{
	int32_t r = os_atomic_add(p, -1, relaxed);
	return r;
}

int32_t
OSAtomicDecrement32Barrier(volatile int32_t *p)
{
	int32_t r = os_atomic_add(p, -1, seq_cst);
	return r;
}

int64_t
OSAtomicAdd64(int64_t v, volatile int64_t *p)
{
	int64_t r = os_atomic_add(p, v, relaxed);
	return r;
}

int64_t
OSAtomicAdd64Barrier(int64_t v, volatile int64_t *p)
{
	int64_t r = os_atomic_add(p, v, seq_cst);
	return r;
}

int64_t
OSAtomicIncrement64(volatile int64_t *p)
{
	int64_t r = os_atomic_add(p, 1, relaxed);
	return r;
}

int64_t
OSAtomicIncrement64Barrier(volatile int64_t *p)
{
	int64_t r = os_atomic_add(p, 1, seq_cst);
	return r;
}

int64_t
OSAtomicDecrement64(volatile int64_t *p)
{
	int64_t r = os_atomic_add(p, -1, relaxed);
	return r;
}

int64_t
OSAtomicDecrement64Barrier(volatile int64_t *p)
{
	int64_t r = os_atomic_add(p, -1, seq_cst);
	return r;
}
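
/*
 * Bitwise and/or/xor: the plain variants return the new value of *p,
 * while the *Orig variants return the value *p held before the operation
 * was applied.
 */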
int32_t
OSAtomicAnd32(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_and(p, v, relaxed);
	return (int32_t)r;
}

int32_t
OSAtomicAnd32Barrier(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_and(p, v, seq_cst);
	return (int32_t)r;
}

int32_t
OSAtomicAnd32Orig(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_and_orig(p, v, relaxed);
	return (int32_t)r;
}

int32_t
OSAtomicAnd32OrigBarrier(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_and_orig(p, v, seq_cst);
	return (int32_t)r;
}

int32_t
OSAtomicOr32(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_or(p, v, relaxed);
	return (int32_t)r;
}

int32_t
OSAtomicOr32Barrier(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_or(p, v, seq_cst);
	return (int32_t)r;
}

int32_t
OSAtomicOr32Orig(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_or_orig(p, v, relaxed);
	return (int32_t)r;
}

int32_t
OSAtomicOr32OrigBarrier(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_or_orig(p, v, seq_cst);
	return (int32_t)r;
}

int32_t
OSAtomicXor32(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_xor(p, v, relaxed);
	return (int32_t)r;
}

int32_t
OSAtomicXor32Barrier(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_xor(p, v, seq_cst);
	return (int32_t)r;
}

int32_t
OSAtomicXor32Orig(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_xor_orig(p, v, relaxed);
	return (int32_t)r;
}

int32_t
OSAtomicXor32OrigBarrier(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_xor_orig(p, v, seq_cst);
	return (int32_t)r;
}
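
/*
 * Compare-and-swap: each shim atomically stores n into *p if *p currently
 * equals o, and returns true exactly when the swap was performed.
 * Illustrative caller-side usage (not part of this file):
 *
 *	volatile int32_t state = 0;
 *	if (OSAtomicCompareAndSwap32Barrier(0, 1, &state)) {
 *		// this thread transitioned state from 0 to 1
 *	}
 */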
bool
OSAtomicCompareAndSwap32(int32_t o, int32_t n, volatile int32_t *p)
{
	return os_atomic_cmpxchg(p, o, n, relaxed);
}

bool
OSAtomicCompareAndSwap32Barrier(int32_t o, int32_t n, volatile int32_t *p)
{
	return os_atomic_cmpxchg(p, o, n, seq_cst);
}

bool
OSAtomicCompareAndSwapPtr(void *o, void *n, void * volatile *p)
{
	return os_atomic_cmpxchg(p, o, n, relaxed);
}

bool
OSAtomicCompareAndSwapPtrBarrier(void *o, void *n, void * volatile *p)
{
	return os_atomic_cmpxchg(p, o, n, seq_cst);
}

bool
OSAtomicCompareAndSwapInt(int o, int n, volatile int *p)
{
	return os_atomic_cmpxchg(p, o, n, relaxed);
}

bool
OSAtomicCompareAndSwapIntBarrier(int o, int n, volatile int *p)
{
	return os_atomic_cmpxchg(p, o, n, seq_cst);
}

bool
OSAtomicCompareAndSwapLong(long o, long n, volatile long *p)
{
	return os_atomic_cmpxchg(p, o, n, relaxed);
}

bool
OSAtomicCompareAndSwapLongBarrier(long o, long n, volatile long *p)
{
	return os_atomic_cmpxchg(p, o, n, seq_cst);
}

bool
OSAtomicCompareAndSwap64(int64_t o, int64_t n, volatile int64_t *p)
{
	return os_atomic_cmpxchg(p, o, n, relaxed);
}

bool
OSAtomicCompareAndSwap64Barrier(int64_t o, int64_t n, volatile int64_t *p)
{
	return os_atomic_cmpxchg(p, o, n, seq_cst);
}
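
/*
 * Bit addressing helper for the TestAnd* shims: it rounds addr down to a
 * 32-bit boundary (adjusting the bit index to compensate), returns the
 * aligned word containing the requested bit, and stores the in-word mask
 * for that bit through vp.  Bits are numbered most-significant-bit first
 * within each byte, i.e. bit 0 is the 0x80 bit of the first byte.
 */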
static inline uint32_t*
_OSAtomicTestPtrVal(uint32_t bit, volatile void *addr, uint32_t *vp)
{
	uintptr_t a = (uintptr_t)addr;
	if (a & 3) {
		// 32-bit align addr and adjust bit to compensate <rdar://12927920>
		bit += (a & 3) * 8;
		a &= ~(uintptr_t)3;
	}
	*vp = (0x80u >> (bit & 7)) << (bit & ~7u & 31);
	return (uint32_t*)((char*)a + 4 * (bit / 32));
}
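
/*
 * The TestAnd* shims return the previous state of the addressed bit:
 * true if the bit was set before the call, false otherwise.
 */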
bool
OSAtomicTestAndSet(uint32_t bit, volatile void *addr)
{
	uint32_t v;
	volatile uint32_t *p = _OSAtomicTestPtrVal(bit, addr, &v);
	uint32_t r = os_atomic_or_orig(p, v, relaxed);
	return (r & v);
}

bool
OSAtomicTestAndSetBarrier(uint32_t bit, volatile void *addr)
{
	uint32_t v;
	volatile uint32_t *p = _OSAtomicTestPtrVal(bit, addr, &v);
	uint32_t r = os_atomic_or_orig(p, v, seq_cst);
	return (r & v);
}

bool
OSAtomicTestAndClear(uint32_t bit, volatile void *addr)
{
	uint32_t v;
	volatile uint32_t *p = _OSAtomicTestPtrVal(bit, addr, &v);
	uint32_t r = os_atomic_and_orig(p, ~v, relaxed);
	return (r & v);
}

bool
OSAtomicTestAndClearBarrier(uint32_t bit, volatile void *addr)
{
	uint32_t v;
	volatile uint32_t *p = _OSAtomicTestPtrVal(bit, addr, &v);
	uint32_t r = os_atomic_and_orig(p, ~v, seq_cst);
	return (r & v);
}
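
/*
 * Lock-free LIFO queue.  OSQueueHead holds the current head pointer;
 * offset is the byte offset of the link field inside each queued item.
 * Enqueue links the new item to the current head and publishes it with a
 * release CAS; Dequeue pops the head with an acquire RMW loop.
 */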
typedef volatile struct {
	// layout-compatible view of OSQueueHead; only item is accessed here
	void *item;
	long unused;
} _OSQueueHead;

void
OSAtomicEnqueue(OSQueueHead *list, void *new, size_t offset)
{
	void * volatile *headptr = &(((_OSQueueHead*)list)->item);
	void * volatile *nextptr = (void*)((char*)new + offset);
	void *head, *next;

	head = os_atomic_load(headptr, relaxed);
	next = new;
	do {
		*nextptr = head;
	} while (!os_atomic_cmpxchgvw(headptr, head, next, &head, release));
}
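
/*
 * Illustrative caller-side usage (not part of this file); the element
 * type and link field name are examples only:
 *
 *	typedef struct elem { int value; struct elem *link; } elem_t;
 *
 *	OSQueueHead q = OS_ATOMIC_QUEUE_INIT;
 *	elem_t e = { .value = 42 };
 *	OSAtomicEnqueue(&q, &e, offsetof(elem_t, link));
 *	elem_t *popped = OSAtomicDequeue(&q, offsetof(elem_t, link));
 */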
void*
OSAtomicDequeue(OSQueueHead *list, size_t offset)
{
	void * volatile *headptr = &(((_OSQueueHead*)list)->item);
	void * volatile *nextptr;
	void *head, *next;

	os_atomic_rmw_loop(headptr, head, next, acquire, {
		if (!head) os_atomic_rmw_loop_give_up(break);
		nextptr = (void*)((char*)head + offset);
		next = *nextptr;
	});
	return head;
}
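
/*
 * OSMemoryBarrier is a full barrier: it issues a sequentially consistent
 * thread fence.
 */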
void
OSMemoryBarrier(void)
{
	os_atomic_thread_fence(seq_cst);
}

#endif // TARGET_OS_EMBEDDED
struct _os_empty_files_are_not_c_files;