/*
 * Copyright (c) 2019 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */
/*
 * This header provides some gory details to implement the <os/atomic_private.h>
 * interfaces. Nothing in this header should be called directly, no promise is
 * made to keep this interface stable.
 *
 * Architecture overrides.
 */
#ifndef __OS_ATOMIC_PRIVATE_H__
#error "Do not include <os/atomic_private_arch.h> directly, use <os/atomic_private.h>"
#endif

#ifndef __OS_ATOMIC_PRIVATE_ARCH_H__
#define __OS_ATOMIC_PRIVATE_ARCH_H__

#pragma mark - arm v7

#if defined(__arm__)

#if OS_ATOMIC_CONFIG_MEMORY_ORDER_DEPENDENCY
/*
 * On armv7, we do provide fine-grained dependency injection, so
 * memory_order_dependency maps to relaxed as far as thread fences are concerned
 */
#undef _os_atomic_mo_dependency
#define _os_atomic_mo_dependency      memory_order_relaxed

#undef os_atomic_make_dependency
#define os_atomic_make_dependency(v) ({ \
    os_atomic_dependency_t _dep; \
    __asm__ __volatile__("and %[_dep], %[_v], #0" \
        : [_dep] "=r" (_dep.__opaque_zero) \
        : [_v] "r" (v)); \
    os_compiler_barrier(acquire); \
    _dep; \
})
#endif // OS_ATOMIC_CONFIG_MEMORY_ORDER_DEPENDENCY
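/*
 * Illustrative sketch, not part of the original header: how the dependency
 * token above is meant to be consumed. It assumes the companion
 * os_atomic_load_with_dependency_on() interface documented in
 * <os/atomic_private.h>; the node type and field names are hypothetical.
 */
#if 0 /* example only */
struct example_node {
	unsigned long value;
};

static inline unsigned long
example_dependent_load(struct example_node **head)
{
	/* a "dependency" load is relaxed at the fence level; ordering comes */
	/* from the data dependency manufactured below                       */
	struct example_node *n = os_atomic_load(head, dependency);
	os_atomic_dependency_t dep = os_atomic_make_dependency((uintptr_t)n);
	return os_atomic_load_with_dependency_on(&n->value, dep);
}
#endif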
#define os_atomic_clear_exclusive()  __builtin_arm_clrex()

#define os_atomic_load_exclusive(p, m)  ({ \
    __auto_type _r = __builtin_arm_ldrex(os_cast_to_nonatomic_pointer(p)); \
    _os_memory_fence_after_atomic(m); \
    _os_compiler_barrier_after_atomic(m); \
    _r; \
})

#define os_atomic_store_exclusive(p, v, m)  ({ \
    _os_compiler_barrier_before_atomic(m); \
    _os_memory_fence_before_atomic(m); \
    !__builtin_arm_strex(v, os_cast_to_nonatomic_pointer(p)); \
})
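/*
 * Illustrative sketch, not part of the original header: the canonical
 * load-exclusive / store-exclusive retry pattern the two macros above
 * support. os_atomic_store_exclusive() evaluates to true when the STREX
 * succeeded, i.e. when no other CPU touched the monitor since the LDREX.
 */
#if 0 /* example only */
static inline uint32_t
example_atomic_increment(uint32_t *counter)
{
	uint32_t ov;
	do {
		ov = os_atomic_load_exclusive(counter, relaxed);
	} while (!os_atomic_store_exclusive(counter, ov + 1, relaxed));
	return ov + 1;
}
#endif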
#if !OS_ATOMIC_HAS_STARVATION_FREE_RMW && !OS_ATOMIC_CONFIG_STARVATION_FREE_ONLY

/*
 * armv7 override of os_atomic_rmw_loop
 * documentation for os_atomic_rmw_loop is in <os/atomic_private.h>
 */
#undef os_atomic_rmw_loop
#define os_atomic_rmw_loop(p, ov, nv, m, ...)  ({ \
    int _result = 0; uint32_t _err = 0; \
    __auto_type *_p = os_cast_to_nonatomic_pointer(p); \
    for (;;) { \
        ov = __builtin_arm_ldrex(_p); \
        __VA_ARGS__; \
        if (!_err) { \
            /* release barrier only done for the first loop iteration */ \
            _os_memory_fence_before_atomic(m); \
        } \
        _err = __builtin_arm_strex(nv, _p); \
        if (__builtin_expect(!_err, 1)) { \
            _os_memory_fence_after_atomic(m); \
            _result = 1; \
            break; \
        } \
    } \
    _os_compiler_barrier_after_atomic(m); \
    _result; \
})

/*
 * armv7 override of os_atomic_rmw_loop_give_up
 * documentation for os_atomic_rmw_loop_give_up is in <os/atomic_private.h>
 */
#undef os_atomic_rmw_loop_give_up
#define os_atomic_rmw_loop_give_up(...) \
    ({ os_atomic_clear_exclusive(); __VA_ARGS__; break; })
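/*
 * Illustrative sketch, not part of the original header: typical use of
 * os_atomic_rmw_loop with os_atomic_rmw_loop_give_up, following the
 * documentation in <os/atomic_private.h>. The body computes `nv` from `ov`;
 * give_up clears the exclusive monitor before running the caller's escape
 * statement. The flag name is hypothetical.
 */
#if 0 /* example only */
#define EXAMPLE_FLAG_CLOSED  0x1u

static inline bool
example_mark_closed(uint32_t *state)
{
	uint32_t ov, nv;
	return os_atomic_rmw_loop(state, ov, nv, acq_rel, {
		if (ov & EXAMPLE_FLAG_CLOSED) {
			/* already closed: leave without storing */
			os_atomic_rmw_loop_give_up(return false);
		}
		nv = ov | EXAMPLE_FLAG_CLOSED;
	});
}
#endif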
#endif // !OS_ATOMIC_HAS_STARVATION_FREE_RMW && !OS_ATOMIC_CONFIG_STARVATION_FREE_ONLY

#endif // __arm__

#pragma mark - arm64
#if defined(__arm64__)

#if OS_ATOMIC_CONFIG_MEMORY_ORDER_DEPENDENCY
/*
 * On arm64, we do provide fine-grained dependency injection, so
 * memory_order_dependency maps to relaxed as far as thread fences are concerned
 */
#undef _os_atomic_mo_dependency
#define _os_atomic_mo_dependency      memory_order_relaxed

#undef os_atomic_make_dependency
#if __ARM64_ARCH_8_32__
#define os_atomic_make_dependency(v) ({ \
    os_atomic_dependency_t _dep; \
    __asm__ __volatile__("and %w[_dep], %w[_v], wzr" \
        : [_dep] "=r" (_dep.__opaque_zero) \
        : [_v] "r" (v)); \
    os_compiler_barrier(acquire); \
    _dep; \
})
#else
#define os_atomic_make_dependency(v) ({ \
    os_atomic_dependency_t _dep; \
    __asm__ __volatile__("and %[_dep], %[_v], xzr" \
        : [_dep] "=r" (_dep.__opaque_zero) \
        : [_v] "r" (v)); \
    os_compiler_barrier(acquire); \
    _dep; \
})
#endif
#endif // OS_ATOMIC_CONFIG_MEMORY_ORDER_DEPENDENCY
#if defined(__ARM_ARCH_8_4__)
/* on armv8.4, a 16-byte aligned load/store pair is atomic */
#undef os_atomic_load_is_plain
#define os_atomic_load_is_plain(p)   (sizeof(*(p)) <= 16)
#endif
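/*
 * Illustrative sketch, not part of the original header: with the override
 * above, a 16-byte aligned value qualifies as a "plain" load on armv8.4,
 * so (assuming os_atomic_load() accepts any type for which
 * os_atomic_load_is_plain() holds) it can be read with a single aligned
 * load pair rather than a load-exclusive sequence. The type below is
 * hypothetical.
 */
#if 0 /* example only */
typedef struct {
	void     *ptr;
	uint64_t  gen;
} example_tagged_ptr_t __attribute__((aligned(16)));

static inline example_tagged_ptr_t
example_read_pair(example_tagged_ptr_t *p)
{
	return os_atomic_load(p, relaxed);
}
#endif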
#define os_atomic_clear_exclusive()  __builtin_arm_clrex()

#define os_atomic_load_exclusive(p, m)  ({ \
    __auto_type _r = _os_atomic_mo_has_acquire(_os_atomic_mo_##m##_smp) \
        ? __builtin_arm_ldaex(os_cast_to_nonatomic_pointer(p)) \
        : __builtin_arm_ldrex(os_cast_to_nonatomic_pointer(p)); \
    _os_compiler_barrier_after_atomic(m); \
    _r; \
})

#define os_atomic_store_exclusive(p, v, m)  ({ \
    _os_compiler_barrier_before_atomic(m); \
    (_os_atomic_mo_has_release(_os_atomic_mo_##m##_smp) \
        ? !__builtin_arm_stlex(v, os_cast_to_nonatomic_pointer(p)) \
        : !__builtin_arm_strex(v, os_cast_to_nonatomic_pointer(p))); \
})
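/*
 * Illustrative sketch, not part of the original header: the ordering
 * argument selects LDAEX/STLEX over LDREX/STREX in the macros above, and
 * os_atomic_clear_exclusive() drops the monitor on paths that bail out
 * without storing. The lock encoding (0 = free, 1 = held) is hypothetical.
 */
#if 0 /* example only */
static inline bool
example_try_lock(uint32_t *lock)
{
	uint32_t ov = os_atomic_load_exclusive(lock, acquire); /* LDAEX */
	if (ov != 0) {
		os_atomic_clear_exclusive(); /* no store will follow */
		return false;
	}
	return os_atomic_store_exclusive(lock, 1U, relaxed);
}
#endif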
#if !OS_ATOMIC_HAS_STARVATION_FREE_RMW && !OS_ATOMIC_CONFIG_STARVATION_FREE_ONLY

/*
 * arm64 (without armv8.1 atomics) override of os_atomic_rmw_loop
 * documentation for os_atomic_rmw_loop is in <os/atomic_private.h>
 */
#undef os_atomic_rmw_loop
#define os_atomic_rmw_loop(p, ov, nv, m, ...)  ({ \
    int _result = 0; \
    __auto_type *_p = os_cast_to_nonatomic_pointer(p); \
    _os_compiler_barrier_before_atomic(m); \
    do { \
        if (_os_atomic_mo_has_acquire(_os_atomic_mo_##m##_smp)) { \
            ov = __builtin_arm_ldaex(_p); \
        } else { \
            ov = __builtin_arm_ldrex(_p); \
        } \
        __VA_ARGS__; \
        if (_os_atomic_mo_has_release(_os_atomic_mo_##m##_smp)) { \
            _result = !__builtin_arm_stlex(nv, _p); \
        } else { \
            _result = !__builtin_arm_strex(nv, _p); \
        } \
    } while (__builtin_expect(!_result, 0)); \
    _os_compiler_barrier_after_atomic(m); \
    _result; \
})

/*
 * arm64 override of os_atomic_rmw_loop_give_up
 * documentation for os_atomic_rmw_loop_give_up is in <os/atomic_private.h>
 */
#undef os_atomic_rmw_loop_give_up
#define os_atomic_rmw_loop_give_up(...) \
    ({ os_atomic_clear_exclusive(); __VA_ARGS__; break; })
#endif // !OS_ATOMIC_HAS_STARVATION_FREE_RMW && !OS_ATOMIC_CONFIG_STARVATION_FREE_ONLY

#endif // __arm64__

#endif /* __OS_ATOMIC_PRIVATE_ARCH_H__ */