/*
 * Copyright (c) 2013 Apple Inc. All rights reserved.
 *
 * @APPLE_APACHE_LICENSE_HEADER_START@
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * @APPLE_APACHE_LICENSE_HEADER_END@
 */

#include "os/internal.h"
#include "libkern/OSAtomic.h"
#include "resolver.h"

#if TARGET_OS_EMBEDDED

OS_ATOMIC_EXPORT
int32_t OSAtomicAdd32Barrier(int32_t v, volatile int32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicIncrement32Barrier(volatile int32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicDecrement32Barrier(volatile int32_t *p);
OS_ATOMIC_EXPORT
int64_t OSAtomicAdd64Barrier(int64_t v, volatile int64_t *p);
OS_ATOMIC_EXPORT
int64_t OSAtomicIncrement64Barrier(volatile int64_t *p);
OS_ATOMIC_EXPORT
int64_t OSAtomicDecrement64Barrier(volatile int64_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicAnd32Barrier(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicAnd32OrigBarrier(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicOr32Barrier(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicOr32OrigBarrier(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicXor32Barrier(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
int32_t OSAtomicXor32OrigBarrier(uint32_t v, volatile uint32_t *p);
OS_ATOMIC_EXPORT
bool OSAtomicCompareAndSwap32Barrier(int32_t o, int32_t n, volatile int32_t *p);
OS_ATOMIC_EXPORT
bool OSAtomicCompareAndSwap64Barrier(int64_t o, int64_t n, volatile int64_t *p);
OS_ATOMIC_EXPORT
bool OSAtomicTestAndSetBarrier(uint32_t n, volatile void * p);
OS_ATOMIC_EXPORT
bool OSAtomicTestAndClearBarrier(uint32_t n, volatile void * p);
OS_ATOMIC_EXPORT
void OSAtomicEnqueue(OSQueueHead *list, void *new, size_t offset);
OS_ATOMIC_EXPORT
void* OSAtomicDequeue(OSQueueHead *list, size_t offset);
OS_ATOMIC_EXPORT
void OSMemoryBarrier(void);

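/*
 * When this file is built for a uniprocessor target (OS_ATOMIC_UP), the
 * barrier and non-barrier variants of each operation are equivalent, so the
 * macro below exports the non-barrier symbol as an alias of the Barrier
 * implementation defined here. Otherwise it expands to nothing and the
 * non-barrier entry points are presumably provided by a separate build of
 * the non-barrier implementations.
 */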
#if OS_ATOMIC_UP
#define OS_ATOMIC_ALIAS_NO_BARRIER(n) OS_ATOMIC_EXPORT_ALIAS(n, n##Barrier)
#else
#define OS_ATOMIC_ALIAS_NO_BARRIER(n)
#endif

int32_t
OSAtomicAdd32Barrier(int32_t v, volatile int32_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicAdd32);
	int32_t r = os_atomic_add(p, v, acq_rel);
	return r;
}

int32_t
OSAtomicIncrement32Barrier(volatile int32_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicIncrement32);
	int32_t r = os_atomic_add(p, 1, acq_rel);
	return r;
}

int32_t
OSAtomicDecrement32Barrier(volatile int32_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicDecrement32);
	int32_t r = os_atomic_add(p, -1, acq_rel);
	return r;
}

int64_t
OSAtomicAdd64Barrier(int64_t v, volatile int64_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicAdd64);
	int64_t r = os_atomic_add(p, v, acq_rel);
	return r;
}

int64_t
OSAtomicIncrement64Barrier(volatile int64_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicIncrement64);
	int64_t r = os_atomic_add(p, 1, acq_rel);
	return r;
}

int64_t
OSAtomicDecrement64Barrier(volatile int64_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicDecrement64);
	int64_t r = os_atomic_add(p, -1, acq_rel);
	return r;
}

int32_t
OSAtomicAnd32Barrier(uint32_t v, volatile uint32_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicAnd32);
	uint32_t r = os_atomic_and(p, v, acq_rel);
	return (int32_t)r;
}

int32_t
OSAtomicAnd32OrigBarrier(uint32_t v, volatile uint32_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicAnd32Orig);
	uint32_t r = os_atomic_and_orig(p, v, acq_rel);
	return (int32_t)r;
}

int32_t
OSAtomicOr32Barrier(uint32_t v, volatile uint32_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicOr32);
	uint32_t r = os_atomic_or(p, v, acq_rel);
	return (int32_t)r;
}

int32_t
OSAtomicOr32OrigBarrier(uint32_t v, volatile uint32_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicOr32Orig);
	uint32_t r = os_atomic_or_orig(p, v, acq_rel);
	return (int32_t)r;
}

int32_t
OSAtomicXor32Barrier(uint32_t v, volatile uint32_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicXor32);
	uint32_t r = os_atomic_xor(p, v, acq_rel);
	return (int32_t)r;
}

int32_t
OSAtomicXor32OrigBarrier(uint32_t v, volatile uint32_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicXor32Orig);
	uint32_t r = os_atomic_xor_orig(p, v, acq_rel);
	return (int32_t)r;
}

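/*
 * On ILP32 targets, long and pointers are 32 bits wide, so the Long and Ptr
 * compare-and-swap entry points are exported below as aliases of the 32-bit
 * implementation; on LP64 targets they alias the 64-bit implementation
 * instead.
 */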
bool
OSAtomicCompareAndSwap32Barrier(int32_t o, int32_t n, volatile int32_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicCompareAndSwap32);
	OS_ATOMIC_ALIAS(OSAtomicCompareAndSwapIntBarrier,
			OSAtomicCompareAndSwap32Barrier);
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicCompareAndSwapInt);
#ifndef __LP64__
	OS_ATOMIC_ALIAS(OSAtomicCompareAndSwapLongBarrier,
			OSAtomicCompareAndSwap32Barrier);
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicCompareAndSwapLong);
	OS_ATOMIC_ALIAS(OSAtomicCompareAndSwapPtrBarrier,
			OSAtomicCompareAndSwap32Barrier);
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicCompareAndSwapPtr);
#endif
	return os_atomic_cmpxchg(p, o, n, acq_rel);
}

bool
OSAtomicCompareAndSwap64Barrier(int64_t o, int64_t n, volatile int64_t *p)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicCompareAndSwap64);
#ifdef __LP64__
	OS_ATOMIC_ALIAS(OSAtomicCompareAndSwapLongBarrier,
			OSAtomicCompareAndSwap64Barrier);
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicCompareAndSwapLong);
	OS_ATOMIC_ALIAS(OSAtomicCompareAndSwapPtrBarrier,
			OSAtomicCompareAndSwap64Barrier);
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicCompareAndSwapPtr);
#endif
	return os_atomic_cmpxchg(p, o, n, acq_rel);
}

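/*
 * OSAtomicTestAndSet/Clear use the bit-numbering convention documented in
 * OSAtomic.h: bit n addresses bit (0x80 >> (n & 7)) of byte (n >> 3), i.e.
 * bit 0 is the most significant bit of the byte at the lowest address. The
 * helper below maps that byte-oriented numbering onto an aligned 32-bit word
 * access (assuming a little-endian layout) and realigns a misaligned address
 * by folding the low address bits into the bit index.
 */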
static inline uint32_t*
_OSAtomicTestPtrVal(uint32_t bit, volatile void *addr, uint32_t *vp)
{
	uintptr_t a = (uintptr_t)addr;
	if (a & 3) {
		// 32-bit align addr and adjust bit to compensate <rdar://12927920>
		bit += (a & 3) * 8;
		a &= ~3ull;
	}
	*vp = (0x80u >> (bit & 7)) << (bit & ~7u & 31);
	return (uint32_t*)((char*)a + 4 * (bit / 32));
}

bool
OSAtomicTestAndSetBarrier(uint32_t bit, volatile void *addr)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicTestAndSet);
	uint32_t v;
	volatile uint32_t *p = _OSAtomicTestPtrVal(bit, addr, &v);
	uint32_t r = os_atomic_or_orig(p, v, acq_rel);
	return (r & v);
}

bool
OSAtomicTestAndClearBarrier(uint32_t bit, volatile void *addr)
{
	OS_ATOMIC_ALIAS_NO_BARRIER(OSAtomicTestAndClear);
	uint32_t v;
	volatile uint32_t *p = _OSAtomicTestPtrVal(bit, addr, &v);
	uint32_t r = os_atomic_and_orig(p, ~v, acq_rel);
	return (r & v);
}

#if !OS_ATOMIC_NO_BARRIER_ONLY

typedef volatile struct {
	void * volatile item;
	long unused;
} _OSQueueHead;

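/*
 * OSAtomicEnqueue pushes onto a LIFO: the current head is stored into the
 * new element's link word (at `offset` bytes into the element) and the
 * element is published with a release compare-and-swap, retrying with the
 * refreshed head value on failure. OSAtomicDequeue pops the head with the
 * matching acquire ordering so the popped element's contents are visible to
 * the consumer.
 */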
void
OSAtomicEnqueue(OSQueueHead *list, void *new, size_t offset)
{
	void * volatile *headptr = &(((_OSQueueHead*)list)->item);
	void * volatile *nextptr = (void*)((char*)new + offset);
	void *head = *headptr;
	do {
		*nextptr = head;
	} while (!os_atomic_cmpxchgvw(headptr, head, new, &head, release));
}

void*
OSAtomicDequeue(OSQueueHead *list, size_t offset)
{
	void * volatile *headptr = &(((_OSQueueHead*)list)->item);
	void * volatile *nextptr;
	void *head, *next;
	(void)os_atomic_rmw_loop(headptr, head, next, acquire, {
		if (!head) {
			os_atomic_rmw_loop_give_up(break);
		}
		nextptr = (void*)((char*)head + offset);
		next = *nextptr;
	});
	return head;
}
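
/*
 * Illustrative usage sketch (elem_t, q and e are hypothetical names): the
 * `offset` argument is the byte offset of the link pointer within each
 * element, typically obtained with offsetof(), and OS_ATOMIC_QUEUE_INIT is
 * the static initializer from <libkern/OSAtomic.h>:
 *
 *	typedef struct elem {
 *		struct elem *link;
 *		int value;
 *	} elem_t;
 *
 *	static OSQueueHead q = OS_ATOMIC_QUEUE_INIT;
 *	elem_t *e = malloc(sizeof(*e));
 *	e->value = 42;
 *	OSAtomicEnqueue(&q, e, offsetof(elem_t, link));
 *	...
 *	elem_t *popped = OSAtomicDequeue(&q, offsetof(elem_t, link));
 */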

void
OSMemoryBarrier(void)
{
	os_atomic_thread_fence(seq_cst);
}

#endif // !OS_ATOMIC_NO_BARRIER_ONLY
#endif // TARGET_OS_EMBEDDED

struct _os_empty_files_are_not_c_files;