+/*
+ * Atomically add (n) to the 16-bit counter at (a).
+ * The _ov form evaluates to the value returned by OSAddAtomic16
+ * (the "old value" — per libkern OSAtomic semantics).
+ * Arguments are parenthesized so expressions like (p + 1, x + y)
+ * bind correctly inside the expansion.
+ */
+#define atomic_add_16_ov(a, n) \
+ ((u_int16_t) OSAddAtomic16((n), (volatile SInt16 *)(a)))
+
+#define atomic_add_16(a, n) \
+ ((void) atomic_add_16_ov(a, n))
+
+/*
+ * Atomically add (n) to the 32-bit counter at (a).
+ * The _ov form evaluates to the value returned by OSAddAtomic
+ * (the "old value" — per libkern OSAtomic semantics).
+ * Arguments are parenthesized: without this, a cast such as
+ * (volatile SInt32 *)a mis-binds when a is e.g. p + 1.
+ */
+#define atomic_add_32_ov(a, n) \
+ ((u_int32_t) OSAddAtomic((n), (volatile SInt32 *)(a)))
+
+#define atomic_add_32(a, n) \
+ ((void) atomic_add_32_ov(a, n))
+
+/*
+ * Atomically add (n) to the 64-bit counter at (a).
+ * The _ov form evaluates to the value returned by OSAddAtomic64
+ * (the "old value" — per libkern OSAtomic semantics).
+ * Arguments are parenthesized for macro hygiene, matching the
+ * 16- and 32-bit variants above.
+ */
+#define atomic_add_64_ov(a, n) \
+ ((u_int64_t) OSAddAtomic64((n), (volatile SInt64 *)(a)))
+
+#define atomic_add_64(a, n) \
+ ((void) atomic_add_64_ov(a, n))
+
+/*
+ * Atomically store (n) into the 64-bit location (a): re-read the
+ * current value and retry the compare-and-swap until it succeeds.
+ * Arguments are parenthesized so *(a) and the pointer cast bind
+ * correctly when a is a non-trivial expression.
+ */
+#define atomic_set_64(a, n) do { \
+ while (!OSCompareAndSwap64(*(a), (n), (volatile UInt64 *)(a))) \
+ ; \
+} while (0)
+
+#if defined(__LP64__)
+#define atomic_get_64(n, a) do { \
+ /* NOTE(review): plain load — presumably relies on aligned 64-bit
+  * reads being atomic on LP64; confirm against the non-LP64 path. */ \
+ (n) = *(a); \
+} while (0)