/*
 * Copyright (c) 2015-2018 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#ifndef _MACHINE_ATOMIC_H
#error "Do not include <arm/atomic.h> directly, use <machine/atomic.h>"
#endif

#ifndef _ARM_ATOMIC_H_
#define _ARM_ATOMIC_H_

#include <mach/boolean.h>

// Parameter for __builtin_arm_dmb
#define DMB_OSHLD       0x1
#define DMB_OSHST       0x2
#define DMB_OSH         0x3
#define DMB_NSHLD       0x5
#define DMB_NSHST       0x6
#define DMB_NSH         0x7
#define DMB_ISHLD       0x9
#define DMB_ISHST       0xa
#define DMB_ISH         0xb
#define DMB_LD          0xd
#define DMB_ST          0xe
#define DMB_SY          0xf

// Parameter for __builtin_arm_dsb
#define DSB_OSHLD       0x1
#define DSB_OSHST       0x2
#define DSB_OSH         0x3
#define DSB_NSHLD       0x5
#define DSB_NSHST       0x6
#define DSB_NSH         0x7
#define DSB_ISHLD       0x9
#define DSB_ISHST       0xa
#define DSB_ISH         0xb
#define DSB_LD          0xd
#define DSB_ST          0xe
#define DSB_SY          0xf

// Parameter for __builtin_arm_isb
#define ISB_SY          0xf

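/*
 * Usage sketch (illustrative only): these values are passed as the
 * immediate argument of the corresponding compiler builtins, e.g.
 *
 *	__builtin_arm_dmb(DMB_ISHST);	// order prior stores before later
 *					// stores, inner shareable domain
 *	__builtin_arm_dsb(DSB_SY);	// full-system data synchronization
 *	__builtin_arm_isb(ISB_SY);	// resynchronize instruction fetch
 */
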
#undef OS_ATOMIC_HAS_LLSC
#define OS_ATOMIC_HAS_LLSC      1

#if defined(__ARM_ARCH_8_2__) && defined(__arm64__)
#undef OS_ATOMIC_USE_LLSC
#define OS_ATOMIC_USE_LLSC      0
#endif

#if defined(__ARM_ARCH_8_4__) && defined(__arm64__)
/* on armv8.4, a 16-byte aligned load/store pair is atomic */
#undef os_atomic_load_is_plain
#define os_atomic_load_is_plain(p) \
	(sizeof(*(p)) <= 16 && _Alignof(typeof(*(p))) >= sizeof(*(p)))
#endif
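
/*
 * Illustrative sketch (hypothetical type, not part of this header): with
 * the predicate above, a two-word structure that is 16-byte aligned
 * satisfies os_atomic_load_is_plain(), e.g.
 *
 *	typedef struct {
 *		uint64_t	vh_value;
 *		uint64_t	vh_generation;
 *	} __attribute__((aligned(16))) versioned_value_t;
 *
 *	// sizeof == 16 and _Alignof == 16, so the load counts as plain
 */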

/*
 * On armv7 & arm64, we provide fine-grained dependency injection, so
 * memory_order_dependency maps to relaxed as far as thread fences are concerned.
 */
#undef memory_order_dependency_smp
#define memory_order_dependency_smp    memory_order_relaxed

#define os_atomic_clear_exclusive()    __builtin_arm_clrex()

#if __arm__

#define os_atomic_load_exclusive(p, m)  ({ \
	_os_atomic_basetypeof(p) _r; \
	_r = __builtin_arm_ldrex(p); \
	_os_memory_fence_after_atomic(m); \
	_os_compiler_barrier_after_atomic(m); \
	_r; \
})

#define os_atomic_store_exclusive(p, v, m)  ({ \
	_os_compiler_barrier_before_atomic(m); \
	_os_memory_fence_before_atomic(m); \
	!__builtin_arm_strex(p, v); \
})
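
/*
 * Usage sketch (illustrative only; `lock` and `lck_state` are hypothetical):
 * the exclusive load arms the monitor, which lets a waiter park in WFE and
 * be woken when another CPU writes the monitored location:
 *
 *	while (os_atomic_load_exclusive(&lock->lck_state, relaxed) != 0) {
 *		__builtin_arm_wfe();
 *	}
 *	os_atomic_clear_exclusive();
 */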

/*
 * armv7 override of os_atomic_make_dependency
 * documentation for os_atomic_make_dependency is in <machine/atomic.h>
 */
#undef os_atomic_make_dependency
#define os_atomic_make_dependency(v) ({ \
	os_atomic_dependency_t _dep; \
	__asm__ __volatile__("and %[_dep], %[_v], #0" \
	    : [_dep] "=r" (_dep.__opaque_zero) : [_v] "r" (v)); \
	os_compiler_barrier(acquire); \
	_dep; \
})

/*
 * armv7 override of os_atomic_rmw_loop
 * documentation for os_atomic_rmw_loop is in <machine/atomic.h>
 */
#undef os_atomic_rmw_loop
#define os_atomic_rmw_loop(p, ov, nv, m, ...)  ({ \
	int _result = 0; uint32_t _err = 0; \
	_os_atomic_basetypeof(p) *_p; \
	_p = (_os_atomic_basetypeof(p) *)(p); \
	_os_compiler_barrier_before_atomic(m); \
	for (;;) { \
		ov = __builtin_arm_ldrex(_p); \
		__VA_ARGS__; \
		if (!_err) { \
			/* release barrier only done for the first loop iteration */ \
			_os_memory_fence_before_atomic(m); \
		} \
		_err = __builtin_arm_strex(nv, _p); \
		if (__builtin_expect(!_err, 1)) { \
			_os_memory_fence_after_atomic(m); \
			_result = 1; \
			break; \
		} \
	} \
	_os_compiler_barrier_after_atomic(m); \
	_result; \
    })

/*
 * armv7 override of os_atomic_rmw_loop_give_up
 * documentation for os_atomic_rmw_loop_give_up is in <machine/atomic.h>
 */
#undef os_atomic_rmw_loop_give_up
#define os_atomic_rmw_loop_give_up(...) \
	({ os_atomic_clear_exclusive(); __VA_ARGS__; break; })

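/*
 * Usage sketch (illustrative only; the refcount field is hypothetical):
 * callers combine os_atomic_rmw_loop with os_atomic_rmw_loop_give_up to
 * build conditional read-modify-write operations, e.g. "retain only if
 * the count is not already zero":
 *
 *	uint32_t ov, nv;
 *	return os_atomic_rmw_loop(&obj->ref_count, ov, nv, relaxed, {
 *		if (ov == 0) {
 *			os_atomic_rmw_loop_give_up(return false);
 *		}
 *		nv = ov + 1;
 *	});
 */
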
#else // __arm64__

#define os_atomic_load_exclusive(p, m)  ({ \
	_os_atomic_basetypeof(p) _r; \
	if (memory_order_has_acquire(memory_order_##m##_smp)) { \
		_r = __builtin_arm_ldaex(p); \
	} else { \
		_r = __builtin_arm_ldrex(p); \
	} \
	_os_compiler_barrier_after_atomic(m); \
	_r; \
})

#define os_atomic_store_exclusive(p, v, m)  ({ \
	_os_compiler_barrier_before_atomic(m); \
	(memory_order_has_release(memory_order_##m##_smp) ? \
	!__builtin_arm_stlex(p, v) : !__builtin_arm_strex(p, v)); \
})
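
/*
 * Note (illustrative): on SMP configurations the ordering argument selects
 * the acquire/release flavor of the exclusives, e.g. with a hypothetical
 * `q->head` field:
 *
 *	ov = os_atomic_load_exclusive(&q->head, acquire);	// LDAXR
 *	ok = os_atomic_store_exclusive(&q->head, nv, release);	// STLXR
 *
 * while `relaxed` maps to the plain LDXR/STXR forms.
 */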

/*
 * arm64 override of os_atomic_make_dependency
 * documentation for os_atomic_make_dependency is in <machine/atomic.h>
 */
#undef os_atomic_make_dependency
#define os_atomic_make_dependency(v) ({ \
	os_atomic_dependency_t _dep; \
	__asm__ __volatile__("and %[_dep], %[_v], xzr" \
	    : [_dep] "=r" (_dep.__opaque_zero) : [_v] "r" (v)); \
	os_compiler_barrier(acquire); \
	_dep; \
})
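
/*
 * Usage sketch (illustrative only; `ring`, `rg_tail` and `rg_slots` are
 * hypothetical, and the consuming side of the dependency API is documented
 * in <machine/atomic.h>): the token's __opaque_zero is an architectural
 * zero the compiler cannot reason about, so folding it into a later address
 * computation carries a hardware address dependency from the first load to
 * the second instead of requiring an acquire fence:
 *
 *	uint32_t idx = os_atomic_load(&ring->rg_tail, dependency);
 *	os_atomic_dependency_t dep = os_atomic_make_dependency(idx);
 *	val = ring->rg_slots[idx + dep.__opaque_zero];
 */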

#if OS_ATOMIC_USE_LLSC

/*
 * arm64 (without armv8.1 atomics) override of os_atomic_rmw_loop
 * documentation for os_atomic_rmw_loop is in <machine/atomic.h>
 */
#undef os_atomic_rmw_loop
#define os_atomic_rmw_loop(p, ov, nv, m, ...)  ({ \
	int _result = 0; \
	_os_atomic_basetypeof(p) *_p; \
	_p = (_os_atomic_basetypeof(p) *)(p); \
	_os_compiler_barrier_before_atomic(m); \
	do { \
		if (memory_order_has_acquire(memory_order_##m##_smp)) { \
			ov = __builtin_arm_ldaex(_p); \
		} else { \
			ov = __builtin_arm_ldrex(_p); \
		} \
		__VA_ARGS__; \
		if (memory_order_has_release(memory_order_##m##_smp)) { \
			_result = !__builtin_arm_stlex(nv, _p); \
		} else { \
			_result = !__builtin_arm_strex(nv, _p); \
		} \
	} while (__builtin_expect(!_result, 0)); \
	_os_compiler_barrier_after_atomic(m); \
	_result; \
    })

/*
 * arm64 override of os_atomic_rmw_loop_give_up
 * documentation for os_atomic_rmw_loop_give_up is in <machine/atomic.h>
 */
#undef os_atomic_rmw_loop_give_up
#define os_atomic_rmw_loop_give_up(...) \
	({ os_atomic_clear_exclusive(); __VA_ARGS__; break; })

#endif // OS_ATOMIC_USE_LLSC

#endif // __arm64__

#endif // _ARM_ATOMIC_H_