+#undef OSCompareAndSwap8
+Boolean OSCompareAndSwap8(UInt8 oldValue, UInt8 newValue, volatile UInt8 *address)
+{
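+    /*
+     * On failure the C11 builtin writes the value it observed back into
+     * oldValue; because oldValue is a by-value copy here, that update is
+     * discarded and only the Boolean success result reaches the caller.
+     */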
+    return __c11_atomic_compare_exchange_strong((_Atomic UInt8 *)address, &oldValue, newValue,
+        memory_order_acq_rel_smp, memory_order_relaxed);
+}
+
+#undef OSCompareAndSwap16
+Boolean OSCompareAndSwap16(UInt16 oldValue, UInt16 newValue, volatile UInt16 *address)
+{
+    return __c11_atomic_compare_exchange_strong((_Atomic UInt16 *)address, &oldValue, newValue,
+        memory_order_acq_rel_smp, memory_order_relaxed);
+}
+
+#undef OSCompareAndSwap
+Boolean OSCompareAndSwap(UInt32 oldValue, UInt32 newValue, volatile UInt32 *address)
+{
+    ALIGN_TEST(address, UInt32);
+    return __c11_atomic_compare_exchange_strong((_Atomic UInt32 *)address, &oldValue, newValue,
+        memory_order_acq_rel_smp, memory_order_relaxed);
+}
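As a caller-side illustration (not part of this patch), OSCompareAndSwap is typically wrapped in a retry loop that re-reads the location until the swap succeeds; the `counter` variable and `counter_increment` helper below are hypothetical:

/* Hypothetical example: lock-free increment built on OSCompareAndSwap. */
static volatile UInt32 counter;

static void
counter_increment(void)
{
    UInt32 old;
    do {
        old = counter;
    } while (!OSCompareAndSwap(old, old + 1, &counter));
}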
+
+#undef OSCompareAndSwap64
+Boolean OSCompareAndSwap64(UInt64 oldValue, UInt64 newValue, volatile UInt64 *address)
+{
+    /*
+     * _Atomic uint64 requires 8-byte alignment on all architectures.
+     * This silences the compiler cast warning. ALIGN_TEST() verifies
+     * that the cast was legal, if defined.
+     */
+    _Atomic UInt64 *aligned_addr = (_Atomic UInt64 *)(uintptr_t)address;
+
+    ALIGN_TEST(address, UInt64);
+    return __c11_atomic_compare_exchange_strong(aligned_addr, &oldValue, newValue,
+        memory_order_acq_rel_smp, memory_order_relaxed);
+}
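The comment above refers to ALIGN_TEST(); a rough sketch of the kind of alignment assertion such a macro might perform (an assumption for illustration, not the definition actually used by this file) is:

/* Illustrative only: trap if the pointer is not naturally aligned for type t. */
#define ALIGN_TEST_SKETCH(p, t) \
    do { \
        if (((uintptr_t)(p) & (sizeof(t) - 1)) != 0) { \
            __builtin_trap(); \
        } \
    } while (0)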
+
+#undef OSCompareAndSwapPtr
+Boolean OSCompareAndSwapPtr(void *oldValue, void *newValue, void * volatile *address)
+{
+#if __LP64__
+    return OSCompareAndSwap64((UInt64)oldValue, (UInt64)newValue, (volatile UInt64 *)address);