+/* Atomically add amount (truncated to 8 bits) to *address; returns the
+ * value held before the addition.  Relaxed (non-barrier) ordering. */
+SInt8 OSAddAtomic8(SInt32 amount, volatile SInt8 *address)
+{
+ _Atomic SInt8 *atomic_addr = (_Atomic SInt8 *)address;
+
+ return __c11_atomic_fetch_add(atomic_addr, amount, memory_order_relaxed);
+}
+
+/* Atomically add amount (truncated to 16 bits) to *address; returns the
+ * value held before the addition.  Relaxed (non-barrier) ordering. */
+SInt16 OSAddAtomic16(SInt32 amount, volatile SInt16 *address)
+{
+ _Atomic SInt16 *atomic_addr = (_Atomic SInt16 *)address;
+
+ return __c11_atomic_fetch_add(atomic_addr, amount, memory_order_relaxed);
+}
+
+#undef OSAddAtomic
+/* Atomically add amount to the 32-bit value at address; returns the value
+ * held before the addition.  Relaxed (non-barrier) ordering. */
+SInt32 OSAddAtomic(SInt32 amount, volatile SInt32 *address)
+{
+ /* Check alignment against the pointed-to type (SInt32, not UInt32),
+  * matching the convention used by OSAddAtomic64 below.  Same size, so
+  * no behavior change. */
+ ALIGN_TEST(address, SInt32);
+ return __c11_atomic_fetch_add((_Atomic SInt32*)address, amount, memory_order_relaxed);
+}
+
+#undef OSAddAtomic64
+/* Atomically add amount to the 64-bit value at address; returns the value
+ * held before the addition.  Relaxed (non-barrier) ordering. */
+SInt64 OSAddAtomic64(SInt64 amount, volatile SInt64 *address)
+{
+ /* NOTE(review): the uintptr_t round-trip is presumably to suppress a
+  * cast-alignment diagnostic — confirm against build flags. */
+ _Atomic SInt64 *target = (_Atomic SInt64 *)(uintptr_t)address;
+
+ ALIGN_TEST(address, SInt64);
+ return __c11_atomic_fetch_add(target, amount, memory_order_relaxed);
+}
+
+#undef OSAddAtomicLong
+/* Atomically add theAmount to the native-width long at address; returns
+ * the value held before the addition.  Dispatches to the fixed-width
+ * primitive whose size matches sizeof(long) on this architecture. */
+long
+OSAddAtomicLong(long theAmount, volatile long *address)
+{
+#ifdef __LP64__
+ /* long is 64-bit under LP64; cast preserves the volatile qualifier. */
+ return (long)OSAddAtomic64((SInt64)theAmount, (volatile SInt64 *)address);
+#else
+ /* long is 32-bit here; an explicit cast is required because long and
+  * SInt32 are distinct C types even when the same width. */
+ return (long)OSAddAtomic((SInt32)theAmount, (volatile SInt32 *)address);
+#endif
+}