#define OSCompareAndSwap64(a, b, c) \
(OSCompareAndSwap64(a, b, __SAFE_CAST_PTR(volatile UInt64*,c)))
#define OSAddAtomic64(a, b) \
(OSAddAtomic64(a, __SAFE_CAST_PTR(volatile SInt64*,b)))
#define OSAddAtomicLong(a, b) \
(OSAddAtomicLong(a, __SAFE_CAST_PTR(volatile long*,b)))
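/*
 * Illustrative sketch, not part of the original header: the 64-bit add
 * returns the value held *before* the addition, so a statistics counter
 * can be bumped and sampled in one call. All names below are hypothetical.
 */
static inline SInt64
Example_AccumulateBytes(volatile SInt64 *total, SInt64 nbytes)
{
	/* Atomically performs *total += nbytes and returns the old total. */
	return OSAddAtomic64(nbytes, total);
}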
* 32/64-bit decrement, depending on sizeof(long)
* @discussion See OSDecrementAtomic.
*/
extern long OSDecrementAtomicLong(volatile long * address);
#define OSDecrementAtomicLong(a) \
	(OSDecrementAtomicLong(__SAFE_CAST_PTR(volatile long*,a)))
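/*
 * Illustrative sketch, not part of the original header: a hypothetical
 * reference-count release built on OSDecrementAtomicLong, which returns
 * the value held *before* the decrement.
 */
static inline Boolean
Example_ReleaseRef(volatile long *refcount)
{
	/* An old value of 1 means this caller dropped the last reference. */
	return OSDecrementAtomicLong(refcount) == 1;
}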
#define OSCompareAndSwap8(a, b, c) \
(OSCompareAndSwap8(a, b, __SAFE_CAST_PTR(volatile UInt8*,c)))
#define OSCompareAndSwap16(a, b, c) \
(OSCompareAndSwap16(a, b, __SAFE_CAST_PTR(volatile UInt16*,c)))
#define OSCompareAndSwap(a, b, c) \
(OSCompareAndSwap(a, b, __SAFE_CAST_PTR(volatile UInt32*,c)))
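/*
 * Illustrative sketch, not part of the original header: the usual
 * compare-and-swap retry loop, here implementing an atomic maximum for
 * which no dedicated primitive exists. OSCompareAndSwap stores the new
 * value only if the location still holds the expected old value and
 * returns true on success. Names are hypothetical.
 */
static inline void
Example_AtomicMax(UInt32 candidate, volatile UInt32 *address)
{
	UInt32 old;
	do {
		old = *address;
		if (old >= candidate) {
			return; /* already at least as large; nothing to do */
		}
	} while (!OSCompareAndSwap(old, candidate, address));
}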
#define OSCompareAndSwapPtr(a, b, c) \
(OSCompareAndSwapPtr(a, b, __SAFE_CAST_PTR(void * volatile *,c)))
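/*
 * Illustrative sketch, not part of the original header: a lock-free
 * stack push using OSCompareAndSwapPtr. The node type and names are
 * hypothetical.
 */
struct example_node {
	struct example_node *next;
};

static inline void
Example_Push(struct example_node *node, struct example_node * volatile *head)
{
	do {
		node->next = *head;
		/* Publish `node` only if the head has not moved under us. */
	} while (!OSCompareAndSwapPtr(node->next, node, (void * volatile *)head));
}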
#define OSAddAtomic(a, b) \
(OSAddAtomic(a, __SAFE_CAST_PTR(volatile SInt32*,b)))
#define OSBitAndAtomic(a, b) \
(OSBitAndAtomic(a, __SAFE_CAST_PTR(volatile UInt32*,b)))
#define OSBitOrAtomic(a, b) \
(OSBitOrAtomic(a, __SAFE_CAST_PTR(volatile UInt32*,b)))
#define OSBitXorAtomic(a, b) \
(OSBitXorAtomic(a, __SAFE_CAST_PTR(volatile UInt32*,b)))
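/*
 * Illustrative sketch, not part of the original header: setting and
 * clearing flag bits in a shared 32-bit word. Each call returns the
 * word's value *before* the operation. The flag name is made up.
 */
#define EXAMPLE_FLAG_BUSY  0x00000001u

static inline Boolean
Example_MarkBusy(volatile UInt32 *flags)
{
	/* True if another thread had already set the busy bit. */
	return (OSBitOrAtomic(EXAMPLE_FLAG_BUSY, flags) & EXAMPLE_FLAG_BUSY) != 0;
}

static inline void
Example_ClearBusy(volatile UInt32 *flags)
{
	OSBitAndAtomic(~EXAMPLE_FLAG_BUSY, flags);
}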
* @result true if the bit was already set, false otherwise.
*/
extern Boolean OSTestAndSet(
	UInt32           bit,
	volatile UInt8 * startAddress);
* @result true if the bit was already clear, false otherwise.
*/
extern Boolean OSTestAndClear(
	UInt32           bit,
	volatile UInt8 * startAddress);
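/*
 * Illustrative sketch, not part of the original header: a minimal
 * test-and-set spin flag. OSTestAndSet operates on a single bit of the
 * byte at startAddress; the bit choice and lock byte here are
 * hypothetical, and a real lock would add backoff.
 */
static inline void
Example_SpinAcquire(volatile UInt8 *lockByte)
{
	/* Returns true while the bit was already set, i.e. still held. */
	while (OSTestAndSet(0, lockByte)) {
		continue; /* spin */
	}
}

static inline void
Example_SpinRelease(volatile UInt8 *lockByte)
{
	(void) OSTestAndClear(0, lockByte);
}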
#if defined(__arm__) || defined(__arm64__)
static inline void
OSMemoryBarrier(void)
{
	__asm__ volatile ("dmb ish" ::: "memory");
}
#elif defined(XNU_KERNEL_PRIVATE)
static inline void
OSMemoryBarrier(void)
{
	__asm__ volatile ("mfence" ::: "memory");
}
#endif