/*
 * Copyright (c) 2000-2015 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <libkern/OSAtomic.h>
#include <kern/debug.h>
#include <machine/atomic.h>

enum {
    false = 0,
    true  = 1
};

#ifndef NULL
#define NULL ((void *)0)
#endif

#define ATOMIC_DEBUG DEBUG
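
/*
 * When ATOMIC_DEBUG is enabled, ALIGN_TEST() panics if an atomic target
 * pointer is not naturally aligned for its type; otherwise it expands to
 * nothing.
 */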
#if ATOMIC_DEBUG
#define ALIGN_TEST(p,t) do{if((uintptr_t)p&(sizeof(t)-1)) panic("Unaligned atomic pointer %p\n",p);}while(0)
#else
#define ALIGN_TEST(p,t) do{}while(0)
#endif

// 19831745 - start of big hammer!
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wcast-qual"

/*
 * atomic operations
 *    These are _the_ atomic operations, now implemented via compiler built-ins.
 *    It is expected that this C implementation is a candidate for Link-Time-
 *    Optimization inlining, whereas the assembler implementations they replace
 *    were not.
 */

#undef OSCompareAndSwap8
Boolean OSCompareAndSwap8(UInt8 oldValue, UInt8 newValue, volatile UInt8 *address)
{
    return __c11_atomic_compare_exchange_strong((_Atomic UInt8 *)address, &oldValue, newValue,
        memory_order_acq_rel_smp, memory_order_relaxed);
}

#undef OSCompareAndSwap16
Boolean OSCompareAndSwap16(UInt16 oldValue, UInt16 newValue, volatile UInt16 *address)
{
    return __c11_atomic_compare_exchange_strong((_Atomic UInt16 *)address, &oldValue, newValue,
        memory_order_acq_rel_smp, memory_order_relaxed);
}

#undef OSCompareAndSwap
Boolean OSCompareAndSwap(UInt32 oldValue, UInt32 newValue, volatile UInt32 *address)
{
    ALIGN_TEST(address, UInt32);
    return __c11_atomic_compare_exchange_strong((_Atomic UInt32 *)address, &oldValue, newValue,
        memory_order_acq_rel_smp, memory_order_relaxed);
}
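
/*
 * Illustrative sketch: a typical compare-and-swap retry loop built on
 * OSCompareAndSwap above.  The helper name and the high-water-mark use
 * case are hypothetical, not part of the exported API.
 */
static inline void
example_record_maximum(volatile UInt32 *maxp, UInt32 candidate)
{
    UInt32 observed;

    do {
        observed = *maxp;
        if (candidate <= observed) {
            return;     /* current maximum is already at least as large */
        }
        /* retry if another thread updated *maxp after we read it */
    } while (!OSCompareAndSwap(observed, candidate, maxp));
}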

#undef OSCompareAndSwap64
Boolean OSCompareAndSwap64(UInt64 oldValue, UInt64 newValue, volatile UInt64 *address)
{
    /*
     * _Atomic UInt64 requires 8-byte alignment on all architectures.
     * This silences the compiler cast warning.  ALIGN_TEST() verifies
     * that the cast was legal, if defined.
     */
    _Atomic UInt64 *aligned_addr = (_Atomic UInt64 *)(uintptr_t)address;

    ALIGN_TEST(address, UInt64);
    return __c11_atomic_compare_exchange_strong(aligned_addr, &oldValue, newValue,
        memory_order_acq_rel_smp, memory_order_relaxed);
}

#undef OSCompareAndSwapPtr
Boolean OSCompareAndSwapPtr(void *oldValue, void *newValue, void * volatile *address)
{
#if __LP64__
    return OSCompareAndSwap64((UInt64)oldValue, (UInt64)newValue, (volatile UInt64 *)address);
#else
    return OSCompareAndSwap((UInt32)oldValue, (UInt32)newValue, (volatile UInt32 *)address);
#endif
}
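
/*
 * Illustrative sketch: swapping a pointer in from NULL with
 * OSCompareAndSwapPtr is a common one-time-publication pattern.  The
 * helper name is hypothetical.
 */
static inline void *
example_publish_once(void * volatile *slot, void *candidate)
{
    if (OSCompareAndSwapPtr(NULL, candidate, slot)) {
        return candidate;   /* this caller installed the pointer */
    }
    return *slot;           /* another caller won the race */
}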

SInt8 OSAddAtomic8(SInt32 amount, volatile SInt8 *address)
{
    return __c11_atomic_fetch_add((_Atomic SInt8 *)address, amount, memory_order_relaxed);
}

SInt16 OSAddAtomic16(SInt32 amount, volatile SInt16 *address)
{
    return __c11_atomic_fetch_add((_Atomic SInt16 *)address, amount, memory_order_relaxed);
}

#undef OSAddAtomic
SInt32 OSAddAtomic(SInt32 amount, volatile SInt32 *address)
{
    ALIGN_TEST(address, UInt32);
    return __c11_atomic_fetch_add((_Atomic SInt32 *)address, amount, memory_order_relaxed);
}
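
/*
 * Illustrative sketch: OSAddAtomic returns the value held *before* the
 * addition, matching the fetch-and-add built-in it wraps.  The
 * ticket-counter helper is hypothetical.
 */
static inline SInt32
example_take_ticket(volatile SInt32 *next_ticket)
{
    /* the previous value is this caller's ticket; the counter advances by 1 */
    return OSAddAtomic(1, next_ticket);
}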

#undef OSAddAtomic64
SInt64 OSAddAtomic64(SInt64 amount, volatile SInt64 *address)
{
    _Atomic SInt64 *aligned_address = (_Atomic SInt64 *)(uintptr_t)address;

    ALIGN_TEST(address, SInt64);
    return __c11_atomic_fetch_add(aligned_address, amount, memory_order_relaxed);
}

#undef OSAddAtomicLong
long
OSAddAtomicLong(long theAmount, volatile long *address)
{
#ifdef __LP64__
    return (long)OSAddAtomic64((SInt64)theAmount, (SInt64 *)address);
#else
    return (long)OSAddAtomic((SInt32)theAmount, address);
#endif
}

#undef OSIncrementAtomic
SInt32 OSIncrementAtomic(volatile SInt32 * value)
{
    return OSAddAtomic(1, value);
}

#undef OSDecrementAtomic
SInt32 OSDecrementAtomic(volatile SInt32 * value)
{
    return OSAddAtomic(-1, value);
}

#undef OSBitAndAtomic
UInt32 OSBitAndAtomic(UInt32 mask, volatile UInt32 * value)
{
    return __c11_atomic_fetch_and((_Atomic UInt32 *)value, mask, memory_order_relaxed);
}

#undef OSBitOrAtomic
UInt32 OSBitOrAtomic(UInt32 mask, volatile UInt32 * value)
{
    return __c11_atomic_fetch_or((_Atomic UInt32 *)value, mask, memory_order_relaxed);
}

#undef OSBitXorAtomic
UInt32 OSBitXorAtomic(UInt32 mask, volatile UInt32 * value)
{
    return __c11_atomic_fetch_xor((_Atomic UInt32 *)value, mask, memory_order_relaxed);
}
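
/*
 * Illustrative sketch: because the bit operations above return the
 * previous value, a caller can set a flag and learn whether it was
 * already set in a single step.  The flag and helper names are
 * hypothetical.
 */
#define EXAMPLE_FLAG_INITED 0x00000001u

static inline Boolean
example_mark_inited(volatile UInt32 *flags)
{
    /* true only for the caller that flips the bit from 0 to 1 */
    return (OSBitOrAtomic(EXAMPLE_FLAG_INITED, flags) & EXAMPLE_FLAG_INITED) == 0;
}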

static Boolean OSTestAndSetClear(UInt32 bit, Boolean wantSet, volatile UInt8 * startAddress)
{
    UInt8 mask = 1;
    UInt8 oldValue;
    UInt8 wantValue;

    startAddress += (bit / 8);
    mask <<= (7 - (bit % 8));
    wantValue = wantSet ? mask : 0;

    do {
        oldValue = *startAddress;
        if ((oldValue & mask) == wantValue) {
            break;
        }
    } while (! __c11_atomic_compare_exchange_strong((_Atomic UInt8 *)startAddress,
        &oldValue, (oldValue & ~mask) | wantValue, memory_order_relaxed, memory_order_relaxed));

    return (oldValue & mask) == wantValue;
}

Boolean OSTestAndSet(UInt32 bit, volatile UInt8 * startAddress)
{
    return OSTestAndSetClear(bit, true, startAddress);
}

Boolean OSTestAndClear(UInt32 bit, volatile UInt8 * startAddress)
{
    return OSTestAndSetClear(bit, false, startAddress);
}
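
/*
 * Illustrative note: OSTestAndSetClear numbers bits most-significant-first
 * within each byte, so bit 0 is the 0x80 bit of startAddress[0], bit 7 is
 * the 0x01 bit of startAddress[0], and bit 8 is the 0x80 bit of
 * startAddress[1].  The helper below is a hypothetical sketch of that
 * mapping.
 */
static inline Boolean
example_bit_is_set(UInt32 bit, const volatile UInt8 *startAddress)
{
    UInt8 mask = (UInt8)(1 << (7 - (bit % 8)));

    return (startAddress[bit / 8] & mask) != 0;
}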

/*
 * silly unaligned versions
 */

SInt8 OSIncrementAtomic8(volatile SInt8 * value)
{
    return OSAddAtomic8(1, value);
}

SInt8 OSDecrementAtomic8(volatile SInt8 * value)
{
    return OSAddAtomic8(-1, value);
}

UInt8 OSBitAndAtomic8(UInt32 mask, volatile UInt8 * value)
{
    return __c11_atomic_fetch_and((_Atomic UInt8 *)value, mask, memory_order_relaxed);
}

UInt8 OSBitOrAtomic8(UInt32 mask, volatile UInt8 * value)
{
    return __c11_atomic_fetch_or((_Atomic UInt8 *)value, mask, memory_order_relaxed);
}

UInt8 OSBitXorAtomic8(UInt32 mask, volatile UInt8 * value)
{
    return __c11_atomic_fetch_xor((_Atomic UInt8 *)value, mask, memory_order_relaxed);
}

SInt16 OSIncrementAtomic16(volatile SInt16 * value)
{
    return OSAddAtomic16(1, value);
}

SInt16 OSDecrementAtomic16(volatile SInt16 * value)
{
    return OSAddAtomic16(-1, value);
}

UInt16 OSBitAndAtomic16(UInt32 mask, volatile UInt16 * value)
{
    return __c11_atomic_fetch_and((_Atomic UInt16 *)value, mask, memory_order_relaxed);
}

UInt16 OSBitOrAtomic16(UInt32 mask, volatile UInt16 * value)
{
    return __c11_atomic_fetch_or((_Atomic UInt16 *)value, mask, memory_order_relaxed);
}

UInt16 OSBitXorAtomic16(UInt32 mask, volatile UInt16 * value)
{
    return __c11_atomic_fetch_xor((_Atomic UInt16 *)value, mask, memory_order_relaxed);
}

// 19831745 - end of big hammer!
#pragma clang diagnostic pop