+
+/*
+ * Atomic primitives, prototyped in kern/simple_lock.h
+ */
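+/*
+ * Rough C equivalents (a sketch only; the authoritative prototypes
+ * live in kern/simple_lock.h). Each routine returns the new value of
+ * the operand; the _noret variants return nothing:
+ *
+ *	uint32_t hw_atomic_add(volatile uint32_t *dest, uint32_t delt);
+ *	uint32_t hw_atomic_sub(volatile uint32_t *dest, uint32_t delt);
+ *	uint32_t hw_atomic_or (volatile uint32_t *dest, uint32_t mask);
+ *	uint32_t hw_atomic_and(volatile uint32_t *dest, uint32_t mask);
+ *
+ * hw_atomic_add/sub use XADD, which leaves the old value in %eax while
+ * storing old+addend to memory; re-adding the saved addend produces
+ * the new value to return.
+ */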
+LEAF_ENTRY(hw_atomic_add)
+ movl L_ARG0, %ecx /* Load address of operand */
+ movl L_ARG1, %eax /* Load addend */
+ movl %eax, %edx
+ lock
+ xaddl %eax, (%ecx) /* Atomic exchange and add */
+ addl %edx, %eax /* Calculate result */
+ LEAF_RET
+
+LEAF_ENTRY(hw_atomic_sub)
+ movl L_ARG0, %ecx /* Load address of operand */
+ movl L_ARG1, %eax /* Load subtrahend */
+ negl %eax /* Negate so the xadd below subtracts */
+ movl %eax, %edx
+ lock
+ xaddl %eax, (%ecx) /* Atomic exchange and add */
+ addl %edx, %eax /* Calculate result */
+ LEAF_RET
+
+LEAF_ENTRY(hw_atomic_or)
+ movl L_ARG0, %ecx /* Load address of operand */
+ movl (%ecx), %eax /* Pick up the current value */
+1:
+ movl L_ARG1, %edx /* Load mask */
+ orl %eax, %edx
+ lock
+ cmpxchgl %edx, (%ecx) /* Atomic CAS */
+ jne 1b
+ movl %edx, %eax /* Return the new value */
+ LEAF_RET
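+/*
+ * The retry loop above is equivalent to this C sketch (atomic_cas_32
+ * is a hypothetical compare-and-swap helper, not part of this file);
+ * hw_atomic_and below follows the same pattern with AND:
+ *
+ *	uint32_t old, new;
+ *	do {
+ *		old = *dest;
+ *		new = old | mask;
+ *	} while (!atomic_cas_32(dest, old, new));
+ *	return new;
+ *
+ * On failure, CMPXCHG reloads %eax with the current memory value, so
+ * the loop needn't re-read (%ecx) explicitly before retrying.
+ */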
+/*
+ * A variant of hw_atomic_or which doesn't return a value.
+ * Since no result is needed, a single locked OR suffices in place
+ * of the CMPXCHG retry loop, making this variant cheaper.
+ */
+
+LEAF_ENTRY(hw_atomic_or_noret)
+ movl L_ARG0, %ecx /* Load address of operand */
+ movl L_ARG1, %edx /* Load mask */
+ lock
+ orl %edx, (%ecx) /* Atomic OR */
+ LEAF_RET
+
+LEAF_ENTRY(hw_atomic_and)
+ movl L_ARG0, %ecx /* Load address of operand */
+ movl (%ecx), %eax /* Pick up the current value */
+1:
+ movl L_ARG1, %edx /* Load mask */
+ andl %eax, %edx
+ lock
+ cmpxchgl %edx, (%ecx) /* Atomic CAS */
+ jne 1b
+ movl %edx, %eax /* Return the new value */
+ LEAF_RET
+/*
+ * A variant of hw_atomic_and which doesn't return a value.
+ * Since no result is needed, a single locked AND suffices in place
+ * of the CMPXCHG retry loop, making this variant cheaper.
+ */
+
+LEAF_ENTRY(hw_atomic_and_noret)
+ movl L_ARG0, %ecx /* Load address of operand */
+ movl L_ARG1, %edx /* Load mask */
+ lock
+ andl %edx, (%ecx) /* Atomic AND */
+ LEAF_RET
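+/*
+ * Note: the LOCK prefix used throughout also acts as a full memory
+ * barrier on x86, so each primitive orders all prior and subsequent
+ * loads and stores of the issuing CPU.
+ */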
+
+#else /* !__i386__ */
+
+LEAF_ENTRY(i_bit_set)
+ lock
+ bts %edi,(%rsi) /* Atomically set the bit */
+ LEAF_RET
+
+LEAF_ENTRY(i_bit_clear)
+ lock
+ btr %edi,(%rsi) /* Atomically clear the bit */
+ LEAF_RET
+
+
+LEAF_ENTRY(bit_lock)
+1:
+ lock
+ bts %edi,(%rsi) /* Try to set the lock bit; CF = old value */
+ jb 1b /* Spin while the bit was already set */
+ LEAF_RET
+
+
+LEAF_ENTRY(bit_lock_try)
+ lock
+ bts %edi,(%rsi) /* Try to set the lock bit; CF = old value */
+ jb bit_lock_failed /* Already set: the lock is held elsewhere */
+ movl $1, %eax /* Return success */
+ LEAF_RET
+bit_lock_failed:
+ xorl %eax,%eax /* Return failure */
+ LEAF_RET
+
+LEAF_ENTRY(bit_unlock)
+ lock
+ btr %edi,(%rsi) /* Atomically clear the lock bit */
+ LEAF_RET
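+/*
+ * Usage sketch (an illustration only; the bit/address argument order
+ * is inferred from the register usage above):
+ *
+ *	bit_lock(bit, address);		spins until the bit is acquired
+ *	... critical section ...
+ *	bit_unlock(bit, address);
+ *
+ * bit_lock_try() returns 1 if it acquired the bit and 0 if the bit was
+ * already set, in which case the caller must not enter the section.
+ */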
+
+
+/*
+ * Atomic primitives, prototyped in kern/simple_lock.h
+ */
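+/*
+ * On x86_64 the SysV AMD64 ABI passes the operand address in %rdi and
+ * the addend/mask in %esi, so no L_ARG loads are needed. Under
+ * MACH_LDEBUG each routine first traps on a misaligned operand.
+ */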
+LEAF_ENTRY(hw_atomic_add)
+#if MACH_LDEBUG
+ test $3, %rdi /* Check that the operand is 32-bit aligned */
+ jz 1f
+ ud2 /* Trap on a misaligned address */
+1:
+#endif
+ movl %esi, %eax /* Load addend */
+ lock xaddl %eax, (%rdi) /* Atomic exchange and add */
+ addl %esi, %eax /* Calculate result */
+ LEAF_RET
+
+LEAF_ENTRY(hw_atomic_sub)
+#if MACH_LDEBUG
+ test $3, %rdi
+ jz 1f
+ ud2
+1:
+#endif
+ negl %esi /* Negate so the xadd below subtracts */
+ movl %esi, %eax
+ lock xaddl %eax, (%rdi) /* Atomic exchange and add */
+ addl %esi, %eax /* Calculate result */
+ LEAF_RET
+
+LEAF_ENTRY(hw_atomic_or)
+#if MACH_LDEBUG
+ test $3, %rdi
+ jz 1f
+ ud2
+1:
+#endif
+ movl (%rdi), %eax /* Pick up the current value */
+1:
+ movl %esi, %edx /* Load mask */
+ orl %eax, %edx
+ lock cmpxchgl %edx, (%rdi) /* Atomic CAS */
+ jne 1b
+ movl %edx, %eax /* Return the new value */
+ LEAF_RET
+/*
+ * A variant of hw_atomic_or which doesn't return a value.
+ * Since no result is needed, a single locked OR suffices in place
+ * of the CMPXCHG retry loop, making this variant cheaper.
+ */
+
+LEAF_ENTRY(hw_atomic_or_noret)
+#if MACH_LDEBUG
+ test $3, %rdi
+ jz 1f
+ ud2
+1:
+#endif
+ lock
+ orl %esi, (%rdi) /* Atomic OR */
+ LEAF_RET
+
+
+LEAF_ENTRY(hw_atomic_and)
+#if MACH_LDEBUG
+ test $3, %rdi
+ jz 1f
+ ud2
+1:
+#endif
+ movl (%rdi), %eax /* Pick up the current value */
+1:
+ movl %esi, %edx /* Load mask */
+ andl %eax, %edx
+ lock cmpxchgl %edx, (%rdi) /* Atomic CAS */
+ jne 1b
+ movl %edx, %eax /* Return the new value */
+ LEAF_RET
+/*
+ * A variant of hw_atomic_and which doesn't return a value.
+ * Since no result is needed, a single locked AND suffices in place
+ * of the CMPXCHG retry loop, making this variant cheaper.
+ */
+
+LEAF_ENTRY(hw_atomic_and_noret)
+#if MACH_LDEBUG
+ test $3, %rdi
+ jz 1f
+ ud2
+1:
+#endif
+ lock andl %esi, (%rdi) /* Atomic AND */
+ LEAF_RET
+
+#endif /* !__i386__ */