[apple/xnu.git] / osfmk / arm / atomic.h (xnu-7195.60.75)
index 0382aa23349723c06bb2f8bb3c907ca063ba8006..2b679dbb381d1b60b4b77414a3130c4ea4e56125 100644 (file)
 // Parameter for __builtin_arm_isb
 #define ISB_SY          0xf
 
-#undef OS_ATOMIC_HAS_LLSC
-#define OS_ATOMIC_HAS_LLSC  1
-
-#if defined(__ARM_ARCH_8_2__) && defined(__arm64__)
-#undef OS_ATOMIC_USE_LLSC
-#define OS_ATOMIC_USE_LLSC  0
-#endif
-
-#if defined(__ARM_ARCH_8_4__) && defined(__arm64__)
-/* on armv8.4, a 16-byte aligned load/store pair is atomic */
-#undef os_atomic_load_is_plain
-#define os_atomic_load_is_plain(p) \
-               (sizeof(*(p)) <= 16 && _Alignof(typeof(*(p))) >= sizeof(*(p)))
-#endif
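
For context, here is a minimal standalone sketch of the predicate (hypothetical names, not XNU code): a load is "plain", i.e. a single untorn instruction, when the value fits in 16 bytes and is aligned to at least its own size, so an aligned 16-byte pair qualifies once armv8.4 makes an aligned ldp/stp single-copy atomic.

#include <stdint.h>
#include <stdio.h>

/* hypothetical standalone copy of the predicate above */
#define load_is_plain(p) \
	(sizeof(*(p)) <= 16 && _Alignof(__typeof__(*(p))) >= sizeof(*(p)))

typedef struct {
	uint64_t lo, hi;
} __attribute__((aligned(16))) pair_t;	/* 16 bytes, 16-byte aligned */

typedef struct {
	uint64_t lo, hi;
} loose_pair_t;				/* 16 bytes, only 8-byte aligned */

int
main(void)
{
	pair_t a; loose_pair_t b; uint32_t c;
	/* only sizeof/_Alignof are evaluated, no reads occur */
	printf("%d %d %d\n",
	    (int)load_is_plain(&a),	/* 1: atomic ldp on armv8.4 */
	    (int)load_is_plain(&b),	/* 0: alignment below size */
	    (int)load_is_plain(&c));	/* 1: naturally aligned word */
	return 0;
}
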
-
-/*
- * On armv7 & arm64, we provide fine-grained dependency injection, so
- * memory_order_dependency maps to relaxed as far as thread fences are concerned.
- */
-#undef memory_order_dependency_smp
-#define memory_order_dependency_smp  memory_order_relaxed
-
-#define os_atomic_clear_exclusive()  __builtin_arm_clrex()
-
-#if __arm__
-
-#define os_atomic_load_exclusive(p, m)  ({ \
-               _os_atomic_basetypeof(p) _r; \
-               _r = __builtin_arm_ldrex(p); \
-               _os_memory_fence_after_atomic(m); \
-               _os_compiler_barrier_after_atomic(m); \
-               _r; \
-})
-
-#define os_atomic_store_exclusive(p, v, m)  ({ \
-               _os_compiler_barrier_before_atomic(m); \
-               _os_memory_fence_before_atomic(m); \
-               !__builtin_arm_strex(p, v); \
-})
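
As a hypothetical usage sketch (the helper name is illustrative, assuming this header is included), the pair above composes into the classic LL/SC try-acquire: the exclusive load arms the monitor, a failed predicate abandons it, and the store publishes only if no other CPU wrote the word in between. The arm64 variants below behave identically at this level.

/*
 * Hypothetical helper: claim a flag only if it currently reads 0.
 * Returns false if the flag was taken or if the strex lost a race.
 */
static inline bool
try_claim(uint32_t *flag)
{
	if (os_atomic_load_exclusive(flag, acquire) != 0) {
		os_atomic_clear_exclusive();	/* drop the reservation */
		return false;
	}
	return os_atomic_store_exclusive(flag, 1U, relaxed);
}
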
-
-/*
- * armv7 override of os_atomic_make_dependency
- * documentation for os_atomic_make_dependency is in <machine/atomic.h>
- */
-#undef os_atomic_make_dependency
-#define os_atomic_make_dependency(v) ({ \
-               os_atomic_dependency_t _dep; \
-               __asm__ __volatile__("and %[_dep], %[_v], #0" \
-                               : [_dep] "=r" (_dep.__opaque_zero) : [_v] "r" (v)); \
-               os_compiler_barrier(acquire); \
-               _dep; \
-})
-
-/*
- * armv7 override of os_atomic_rmw_loop
- * documentation for os_atomic_rmw_loop is in <machine/atomic.h>
- */
-#undef os_atomic_rmw_loop
-#define os_atomic_rmw_loop(p, ov, nv, m, ...)  ({ \
-               int _result = 0; uint32_t _err = 0; \
-               _os_atomic_basetypeof(p) *_p; \
-               _p = (_os_atomic_basetypeof(p) *)(p); \
-               _os_compiler_barrier_before_atomic(m); \
-               for (;;) { \
-                       ov = __builtin_arm_ldrex(_p); \
-                       __VA_ARGS__; \
-                       if (!_err) { \
-       /* release barrier only done for the first loop iteration */ \
-                               _os_memory_fence_before_atomic(m); \
-                       } \
-                       _err = __builtin_arm_strex(nv, _p); \
-                       if (__builtin_expect(!_err, 1)) { \
-                               _os_memory_fence_after_atomic(m); \
-                               _result = 1; \
-                               break; \
-                       } \
-               } \
-               _os_compiler_barrier_after_atomic(m); \
-               _result; \
-       })
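
A hypothetical caller, to show the contract (names not from this file): the body computes nv from ov, and os_atomic_rmw_loop_give_up() is how a caller bails out without publishing a store; note how it clears the monitor before running the statement it is handed. The same pattern works unchanged with the arm64 variants below.

/* hypothetical example: set *p = max(*p, v), false if *p already >= v */
static inline bool
update_max(uint32_t *p, uint32_t v)
{
	uint32_t ov, nv;

	return os_atomic_rmw_loop(p, ov, nv, acq_rel, {
		if (ov >= v) {
			/* clrex, then return without storing */
			os_atomic_rmw_loop_give_up(return false);
		}
		nv = v;
	});
}
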
-
-/*
- * armv7 override of os_atomic_rmw_loop_give_up
- * documentation for os_atomic_rmw_loop_give_up is in <machine/atomic.h>
- */
-#undef os_atomic_rmw_loop_give_up
-#define os_atomic_rmw_loop_give_up(...) \
-               ({ os_atomic_clear_exclusive(); __VA_ARGS__; break; })
-
-#else // __arm64__
-
-#define os_atomic_load_exclusive(p, m)  ({ \
-               _os_atomic_basetypeof(p) _r; \
-               if (memory_order_has_acquire(memory_order_##m##_smp)) { \
-                       _r = __builtin_arm_ldaex(p); \
-               } else { \
-                       _r = __builtin_arm_ldrex(p); \
-               } \
-               _os_compiler_barrier_after_atomic(m); \
-               _r; \
-})
-
-#define os_atomic_store_exclusive(p, v, m)  ({ \
-               _os_compiler_barrier_before_atomic(m); \
-               (memory_order_has_release(memory_order_##m##_smp) ? \
-                               !__builtin_arm_stlex(p, v) : !__builtin_arm_strex(p, v)); \
-})
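
Unlike the armv7 versions above, no dmb is emitted here: the ordering token directly selects the acquiring/releasing exclusives (ldaex/stlex). One hypothetical use of a bare exclusive load, sketching the usual arm64 monitor + wfe spin-wait shape (XNU's real wait loops live elsewhere):

/* hypothetical: sleep with wfe until *p becomes nonzero */
static inline void
wait_for_nonzero(uint32_t *p)
{
	while (os_atomic_load_exclusive(p, relaxed) == 0) {
		/* a write to *p clears the monitor and wakes the wfe */
		__builtin_arm_wfe();
	}
	os_atomic_clear_exclusive();
}
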
-
-/*
- * arm64 override of os_atomic_make_dependency
- * documentation for os_atomic_make_dependency is in <machine/atomic.h>
- */
-#undef os_atomic_make_dependency
-#define os_atomic_make_dependency(v) ({ \
-               os_atomic_dependency_t _dep; \
-               __asm__ __volatile__("and %[_dep], %[_v], xzr" \
-                               : [_dep] "=r" (_dep.__opaque_zero) : [_v] "r" (v)); \
-               os_compiler_barrier(acquire); \
-               _dep; \
-})
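
A hypothetical consume-style reader, assuming the companion helpers documented in <machine/atomic.h> (os_atomic_load_with_dependency_on et al.): the `and` above always yields zero, but the compiler cannot see through the asm, so the zero still carries a register dependency on v; threading it into a later address keeps the dependent load ordered with no fence, which is why memory_order_dependency maps to relaxed above. The armv7 version is the same trick with `#0` instead of xzr.

/* hypothetical published pointer and reader, not from XNU */
struct node {
	uint32_t	value;
};
extern struct node *head_ptr;

static inline uint32_t
read_dependent(void)
{
	struct node *n = os_atomic_load(&head_ptr, dependency);
	os_atomic_dependency_t dep = os_atomic_make_dependency((uintptr_t)n);

	/* the opaque zero is computed from n, so the load below is
	 * dependency-ordered after the load of head_ptr */
	return os_atomic_load_with_dependency_on(&n->value, dep);
}
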
-
-#if OS_ATOMIC_USE_LLSC
-
-/*
- * arm64 (without armv8.1 atomics) override of os_atomic_rmw_loop
- * documentation for os_atomic_rmw_loop is in <machine/atomic.h>
- */
-#undef os_atomic_rmw_loop
-#define os_atomic_rmw_loop(p, ov, nv, m, ...)  ({ \
-               int _result = 0; \
-               _os_atomic_basetypeof(p) *_p; \
-               _p = (_os_atomic_basetypeof(p) *)(p); \
-               _os_compiler_barrier_before_atomic(m); \
-               do { \
-                       if (memory_order_has_acquire(memory_order_##m##_smp)) { \
-                               ov = __builtin_arm_ldaex(_p); \
-                       } else { \
-                               ov = __builtin_arm_ldrex(_p); \
-                       } \
-                       __VA_ARGS__; \
-                       if (memory_order_has_release(memory_order_##m##_smp)) { \
-                               _result = !__builtin_arm_stlex(nv, _p); \
-                       } else { \
-                               _result = !__builtin_arm_strex(nv, _p); \
-                       } \
-               } while (__builtin_expect(!_result, 0)); \
-               _os_compiler_barrier_after_atomic(m); \
-               _result; \
-       })
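
When OS_ATOMIC_USE_LLSC is forced to 0 (armv8.2 and later, per the top of this diff), this override drops out and callers get the generic <machine/atomic.h> loop, which is built on compare-exchange and compiles to the armv8.1 LSE instructions (cas and friends). A hypothetical CAS-style equivalent of the update_max() sketch above, assuming the os_atomic_cmpxchgv helper from that header:

/* hypothetical: the same atomic max, written against os_atomic_cmpxchgv */
static inline bool
update_max_cas(uint32_t *p, uint32_t v)
{
	uint32_t ov = os_atomic_load(p, relaxed);

	do {
		if (ov >= v) {
			return false;
		}
		/* on failure, ov is refreshed with the observed value */
	} while (!os_atomic_cmpxchgv(p, ov, v, &ov, acq_rel));
	return true;
}
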
-
-/*
- * arm64 override of os_atomic_rmw_loop_give_up
- * documentation for os_atomic_rmw_loop_give_up is in <machine/atomic.h>
- */
-#undef os_atomic_rmw_loop_give_up
-#define os_atomic_rmw_loop_give_up(...) \
-               ({ os_atomic_clear_exclusive(); __VA_ARGS__; break; })
-
-#endif // OS_ATOMIC_USE_LLSC
-
-#endif // __arm64__
-
 #endif // _ARM_ATOMIC_H_