/*
 * Copyright (c) 2013 Apple Inc. All rights reserved.
 *
 * @APPLE_APACHE_LICENSE_HEADER_START@
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * @APPLE_APACHE_LICENSE_HEADER_END@
 */

#include "os/internal.h"
#include "resolver.h"
#include "libkern/OSAtomic.h"

#if defined(__arm__) || defined(__arm64__)
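
/*
 * Legacy OSAtomic interfaces for ARM. Each entry point below forwards to
 * the corresponding os_atomic_* primitive with an explicit memory order.
 */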

OS_ATOMIC_EXPORT
int32_t OSAtomicAdd32(int32_t v, volatile int32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicAdd32Barrier(int32_t v, volatile int32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicIncrement32(volatile int32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicIncrement32Barrier(volatile int32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicDecrement32(volatile int32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicDecrement32Barrier(volatile int32_t *p);
OS_ATOMIC_EXPORT
int64_t OSAtomicAdd64(int64_t v, volatile int64_t *p);
OS_ATOMIC_EXPORT
int64_t OSAtomicAdd64Barrier(int64_t v, volatile int64_t *p);
OS_ATOMIC_EXPORT
int64_t OSAtomicIncrement64(volatile int64_t *p);
OS_ATOMIC_EXPORT
int64_t OSAtomicIncrement64Barrier(volatile int64_t *p);
OS_ATOMIC_EXPORT
int64_t OSAtomicDecrement64(volatile int64_t *p);
OS_ATOMIC_EXPORT
int64_t OSAtomicDecrement64Barrier(volatile int64_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicAnd32(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicAnd32Barrier(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicAnd32Orig(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicAnd32OrigBarrier(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicOr32(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicOr32Barrier(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicOr32Orig(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicOr32OrigBarrier(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicXor32(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicXor32Barrier(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicXor32Orig(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicXor32OrigBarrier(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
bool OSAtomicCompareAndSwap32(int32_t o, int32_t n, volatile int32_t *p);
OS_ATOMIC_EXPORT
bool OSAtomicCompareAndSwap32Barrier(int32_t o, int32_t n, volatile int32_t *p);
OS_ATOMIC_EXPORT
bool OSAtomicCompareAndSwapPtr(void *o, void *n, void * volatile *p);
OS_ATOMIC_EXPORT
bool OSAtomicCompareAndSwapPtrBarrier(void *o, void *n, void * volatile *p);
OS_ATOMIC_EXPORT
bool OSAtomicCompareAndSwapInt(int o, int n, volatile int *p);
OS_ATOMIC_EXPORT
bool OSAtomicCompareAndSwapIntBarrier(int o, int n, volatile int *p);
OS_ATOMIC_EXPORT
bool OSAtomicCompareAndSwapLong(long o, long n, volatile long *p);
OS_ATOMIC_EXPORT
bool OSAtomicCompareAndSwapLongBarrier(long o, long n, volatile long *p);
OS_ATOMIC_EXPORT
bool OSAtomicCompareAndSwap64(int64_t o, int64_t n, volatile int64_t *p);
OS_ATOMIC_EXPORT
bool OSAtomicCompareAndSwap64Barrier(int64_t o, int64_t n, volatile int64_t *p);
OS_ATOMIC_EXPORT
bool OSAtomicTestAndSet(uint32_t n, volatile void * p);
OS_ATOMIC_EXPORT
bool OSAtomicTestAndSetBarrier(uint32_t n, volatile void * p);
OS_ATOMIC_EXPORT
bool OSAtomicTestAndClear(uint32_t n, volatile void * p);
OS_ATOMIC_EXPORT
bool OSAtomicTestAndClearBarrier(uint32_t n, volatile void * p);
OS_ATOMIC_EXPORT
void OSAtomicEnqueue(OSQueueHead *list, void *new, size_t offset);
OS_ATOMIC_EXPORT
void* OSAtomicDequeue(OSQueueHead *list, size_t offset);
OS_ATOMIC_EXPORT
void OSMemoryBarrier(void);
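
/*
 * Plain (non-Barrier) wrappers use relaxed ordering; the *Barrier variants
 * use sequentially consistent ordering, which supplies the full memory
 * barrier promised by the OSAtomic interface.
 */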

int32_t
OSAtomicAdd32(int32_t v, volatile int32_t *p)
{
	int32_t r = os_atomic_add(p, v, relaxed);
	return r;
}

int32_t
OSAtomicAdd32Barrier(int32_t v, volatile int32_t *p)
{
	int32_t r = os_atomic_add(p, v, seq_cst);
	return r;
}

int32_t
OSAtomicIncrement32(volatile int32_t *p)
{
	int32_t r = os_atomic_add(p, 1, relaxed);
	return r;
}

int32_t
OSAtomicIncrement32Barrier(volatile int32_t *p)
{
	int32_t r = os_atomic_add(p, 1, seq_cst);
	return r;
}

int32_t
OSAtomicDecrement32(volatile int32_t *p)
{
	int32_t r = os_atomic_add(p, -1, relaxed);
	return r;
}

int32_t
OSAtomicDecrement32Barrier(volatile int32_t *p)
{
	int32_t r = os_atomic_add(p, -1, seq_cst);
	return r;
}

int64_t
OSAtomicAdd64(int64_t v, volatile int64_t *p)
{
	int64_t r = os_atomic_add(p, v, relaxed);
	return r;
}

int64_t
OSAtomicAdd64Barrier(int64_t v, volatile int64_t *p)
{
	int64_t r = os_atomic_add(p, v, seq_cst);
	return r;
}

int64_t
OSAtomicIncrement64(volatile int64_t *p)
{
	int64_t r = os_atomic_add(p, 1, relaxed);
	return r;
}

int64_t
OSAtomicIncrement64Barrier(volatile int64_t *p)
{
	int64_t r = os_atomic_add(p, 1, seq_cst);
	return r;
}

int64_t
OSAtomicDecrement64(volatile int64_t *p)
{
	int64_t r = os_atomic_add(p, -1, relaxed);
	return r;
}

int64_t
OSAtomicDecrement64Barrier(volatile int64_t *p)
{
	int64_t r = os_atomic_add(p, -1, seq_cst);
	return r;
}
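
/*
 * Bitwise and/or/xor wrappers. The plain variants return the new value of
 * *p after the operation; the *Orig variants return the value *p held
 * before the operation (via os_atomic_*_orig).
 */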

int32_t
OSAtomicAnd32(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_and(p, v, relaxed);
	return (int32_t)r;
}

int32_t
OSAtomicAnd32Barrier(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_and(p, v, seq_cst);
	return (int32_t)r;
}

int32_t
OSAtomicAnd32Orig(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_and_orig(p, v, relaxed);
	return (int32_t)r;
}

int32_t
OSAtomicAnd32OrigBarrier(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_and_orig(p, v, seq_cst);
	return (int32_t)r;
}

int32_t
OSAtomicOr32(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_or(p, v, relaxed);
	return (int32_t)r;
}

int32_t
OSAtomicOr32Barrier(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_or(p, v, seq_cst);
	return (int32_t)r;
}

int32_t
OSAtomicOr32Orig(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_or_orig(p, v, relaxed);
	return (int32_t)r;
}

int32_t
OSAtomicOr32OrigBarrier(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_or_orig(p, v, seq_cst);
	return (int32_t)r;
}

int32_t
OSAtomicXor32(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_xor(p, v, relaxed);
	return (int32_t)r;
}

int32_t
OSAtomicXor32Barrier(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_xor(p, v, seq_cst);
	return (int32_t)r;
}

int32_t
OSAtomicXor32Orig(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_xor_orig(p, v, relaxed);
	return (int32_t)r;
}

int32_t
OSAtomicXor32OrigBarrier(uint32_t v, volatile uint32_t *p)
{
	uint32_t r = os_atomic_xor_orig(p, v, seq_cst);
	return (int32_t)r;
}
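
/*
 * Compare-and-swap wrappers. Each returns true if *p matched o and was
 * replaced by n, and false otherwise.
 */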

bool
OSAtomicCompareAndSwap32(int32_t o, int32_t n, volatile int32_t *p)
{
	return os_atomic_cmpxchg(p, o, n, relaxed);
}

bool
OSAtomicCompareAndSwap32Barrier(int32_t o, int32_t n, volatile int32_t *p)
{
	return os_atomic_cmpxchg(p, o, n, seq_cst);
}

bool
OSAtomicCompareAndSwapPtr(void *o, void *n, void * volatile *p)
{
	return os_atomic_cmpxchg(p, o, n, relaxed);
}

bool
OSAtomicCompareAndSwapPtrBarrier(void *o, void *n, void * volatile *p)
{
	return os_atomic_cmpxchg(p, o, n, seq_cst);
}

bool
OSAtomicCompareAndSwapInt(int o, int n, volatile int *p)
{
	return os_atomic_cmpxchg(p, o, n, relaxed);
}

bool
OSAtomicCompareAndSwapIntBarrier(int o, int n, volatile int *p)
{
	return os_atomic_cmpxchg(p, o, n, seq_cst);
}

bool
OSAtomicCompareAndSwapLong(long o, long n, volatile long *p)
{
	return os_atomic_cmpxchg(p, o, n, relaxed);
}

bool
OSAtomicCompareAndSwapLongBarrier(long o, long n, volatile long *p)
{
	return os_atomic_cmpxchg(p, o, n, seq_cst);
}

bool
OSAtomicCompareAndSwap64(int64_t o, int64_t n, volatile int64_t *p)
{
	return os_atomic_cmpxchg(p, o, n, relaxed);
}

bool
OSAtomicCompareAndSwap64Barrier(int64_t o, int64_t n, volatile int64_t *p)
{
	return os_atomic_cmpxchg(p, o, n, seq_cst);
}
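
/*
 * OSAtomicTestAndSet/Clear number bits with bit 0 as the most significant
 * bit (0x80) of the byte at the lowest address. _OSAtomicTestPtrVal turns
 * that (bit, addr) pair into a naturally aligned 32-bit word pointer plus
 * the mask for the selected bit within that word.
 */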

static inline uint32_t*
_OSAtomicTestPtrVal(uint32_t bit, volatile void *addr, uint32_t *vp)
{
	uintptr_t a = (uintptr_t)addr;
	if (a & 3) {
		// 32-bit align addr and adjust bit to compensate <rdar://12927920>
		bit += (a & 3) * 8;
		a &= ~3ull;
	}
	*vp = (0x80u >> (bit & 7)) << (bit & ~7u & 31);
	return (uint32_t*)((char*)a + 4 * (bit / 32));
}
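
/*
 * Each test-and-set/clear wrapper returns the original value of the
 * selected bit: true if the bit was set before the operation, false if it
 * was clear.
 */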

bool
OSAtomicTestAndSet(uint32_t bit, volatile void *addr)
{
	uint32_t v;
	volatile uint32_t *p = _OSAtomicTestPtrVal(bit, addr, &v);
	uint32_t r = os_atomic_or_orig(p, v, relaxed);
	return (r & v);
}

bool
OSAtomicTestAndSetBarrier(uint32_t bit, volatile void *addr)
{
	uint32_t v;
	volatile uint32_t *p = _OSAtomicTestPtrVal(bit, addr, &v);
	uint32_t r = os_atomic_or_orig(p, v, seq_cst);
	return (r & v);
}

bool
OSAtomicTestAndClear(uint32_t bit, volatile void *addr)
{
	uint32_t v;
	volatile uint32_t *p = _OSAtomicTestPtrVal(bit, addr, &v);
	uint32_t r = os_atomic_and_orig(p, ~v, relaxed);
	return (r & v);
}

bool
OSAtomicTestAndClearBarrier(uint32_t bit, volatile void *addr)
{
	uint32_t v;
	volatile uint32_t *p = _OSAtomicTestPtrVal(bit, addr, &v);
	uint32_t r = os_atomic_and_orig(p, ~v, seq_cst);
	return (r & v);
}
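
/*
 * OSAtomicEnqueue/OSAtomicDequeue implement a LIFO list (a lock-free stack).
 * The opaque OSQueueHead is treated as a head pointer plus a generation
 * count; the LL/SC implementations below only use the head pointer ("item"),
 * relying on load-exclusive/store-exclusive to retry when another thread
 * updates it, so the generation count is unused here.
 *
 * Usage sketch (the node type and names are hypothetical, for illustration
 * only; assumes the standard OSQueueHead API with OS_ATOMIC_QUEUE_INIT and
 * offsetof from <stddef.h>):
 *
 *	typedef struct node {
 *		int value;
 *		struct node *link;	// reserved for the queue's use
 *	} node_t;
 *
 *	static OSQueueHead head = OS_ATOMIC_QUEUE_INIT;
 *
 *	node_t *n = calloc(1, sizeof(*n));
 *	OSAtomicEnqueue(&head, n, offsetof(node_t, link));
 *	node_t *popped = OSAtomicDequeue(&head, offsetof(node_t, link));
 */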

typedef struct {
	void *item;
	long gencount;
} _OSQueueHead;

OS_ALWAYS_INLINE
static inline void
_OSAtomicEnqueue_llsc(OSQueueHead *list, void *new, size_t offset)
{
	void * volatile *headptr = &(((_OSQueueHead*)list)->item);
	void * volatile *nextptr = (void*)((char*)new + offset);
	void *head, *tmp, *next;

	head = os_atomic_load(headptr, relaxed);
	next = new;
	do {
		// Link the new node to the head we observed, then re-read the
		// head exclusively; retry if it changed or if the exclusive
		// store is lost to another writer. The release store publishes
		// the node's contents before it becomes reachable.
		*nextptr = tmp = head;
		head = os_atomic_load_exclusive(headptr, relaxed);
	} while (tmp != head || !os_atomic_store_exclusive(headptr, next, release));
}
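
/*
 * Pop the head of the list, or return NULL if it is empty. The acquire
 * load-exclusive pairs with the release store in the enqueue path so the
 * dequeued node's contents are visible; the exclusive monitor is cleared
 * explicitly when the list is found empty.
 */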

OS_ALWAYS_INLINE
static inline void *
_OSAtomicDequeue_llsc(OSQueueHead *list, size_t offset)
{
	void * volatile *headptr = &(((_OSQueueHead*)list)->item);
	void * volatile *nextptr;
	void *head, *next;

	do {
		head = os_atomic_load_exclusive(headptr, acquire);
		if (!head) {
			os_atomic_clear_exclusive();
			break;
		}
		nextptr = (void*)((char*)head + offset);
		next = *nextptr;
	} while (unlikely(!os_atomic_store_exclusive(headptr, next, relaxed)));

	return head;
}

void
OSAtomicEnqueue(OSQueueHead *list, void *new, size_t offset)
{
	return _OSAtomicEnqueue_llsc(list, new, offset);
}

void*
OSAtomicDequeue(OSQueueHead *list, size_t offset)
{
	return _OSAtomicDequeue_llsc(list, offset);
}

void
OSMemoryBarrier(void)
{
	os_atomic_thread_fence(seq_cst);
}

#endif // defined(__arm__) || defined(__arm64__)

struct _os_empty_files_are_not_c_files;