+COMMPAGE_FUNCTION_START(spin_lock_try_up_64, 64, 4)
+ xorl %eax, %eax /* %eax = 0, the value of a free lock */
+ orl $-1, %edx /* %edx = -1, the value that marks it taken */
+ cmpxchgl %edx, (%rdi) /* swap in -1 iff free; no lock prefix needed on UP */
+ setz %dl /* ZF set iff the exchange happened */
+ movzbl %dl, %eax /* return 1 if we took the lock, else 0 */
+ ret
+COMMPAGE_DESCRIPTOR(spin_lock_try_up_64,_COMM_PAGE_SPINLOCK_TRY,kUP,0)
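+/*
+ * Usage sketch (hypothetical, not part of this file): user code reaches
+ * this routine through its fixed commpage slot, passing the lock pointer
+ * in %rdi per the usual calling convention and getting the result in %eax:
+ *
+ *	typedef volatile int osspinlock_t;		// assumed type name
+ *	int (*try_lock)(osspinlock_t *) =
+ *		(int (*)(osspinlock_t *))_COMM_PAGE_SPINLOCK_TRY;
+ *	if (try_lock(&lock)) { ... }			// nonzero = acquired
+ */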
+
+
+COMMPAGE_FUNCTION_START(spin_lock_try_mp_64, 64, 4)
+ xorl %eax, %eax /* expect 0 (free), as in the UP version */
+ orl $-1, %edx /* -1 marks the lock taken */
+ lock /* MP: the cmpxchg must be atomic across CPUs */
+ cmpxchgl %edx, (%rdi)
+ setz %dl /* ZF set iff the exchange happened */
+ movzbl %dl, %eax /* return 1 if we took the lock, else 0 */
+ ret
+COMMPAGE_DESCRIPTOR(spin_lock_try_mp_64,_COMM_PAGE_SPINLOCK_TRY,0,kUP)
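+/*
+ * Note on variant selection (my reading of the descriptor convention,
+ * based on how kUP is used here): the third and fourth arguments are the
+ * capability bits a CPU must have and must not have, so the kernel
+ * installs spin_lock_try_up_64 only on uniprocessors and
+ * spin_lock_try_mp_64 everywhere else. Both occupy the same
+ * _COMM_PAGE_SPINLOCK_TRY slot, so callers never pick a variant.
+ */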
+
+
+COMMPAGE_FUNCTION_START(spin_lock_up_64, 64, 4)
+ movq %rdi,%r8 /* save lock pointer; %rdi is overwritten before the syscall */
+0:
+ xorl %eax, %eax /* expect 0 (free) */
+ orl $-1, %edx /* -1 marks the lock taken */
+ cmpxchgl %edx, (%r8) /* no lock prefix needed on a uniprocessor */
+ jnz 1f /* lock was already held */
+ ret
+1:
+ /* failed to get the lock, so relinquish the processor immediately on UP */
+ xorl %edi,%edi /* THREAD_NULL */
+ movl $1,%esi /* SWITCH_OPTION_DEPRESS */
+ movl $1,%edx /* 1 ms */
+ movl $(SYSCALL_CONSTRUCT_MACH(61)),%eax /* 61 = thread_switch */
+ syscall
+ jmp 0b
+COMMPAGE_DESCRIPTOR(spin_lock_up_64,_COMM_PAGE_SPINLOCK_LOCK,kUP,0)
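+/*
+ * Rationale: on a uniprocessor there is no point spinning, since the
+ * thread holding the lock cannot be running while we are. The loop is
+ * roughly equivalent to this C sketch (thread_switch arguments as set
+ * up above):
+ *
+ *	while (!try_lock(lock))
+ *		thread_switch(THREAD_NULL, SWITCH_OPTION_DEPRESS, 1);	// 1 ms
+ */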
+
+
+COMMPAGE_FUNCTION_START(spin_lock_mp_64, 64, 4)
+ movq %rdi,%r8 /* save lock pointer; %rdi is overwritten before the syscall */
+0:
+ xorl %eax, %eax /* expect 0 (free) */
+ orl $-1, %edx /* -1 marks the lock taken */
+ lock /* MP: the cmpxchg must be atomic across CPUs */
+ cmpxchgl %edx, (%r8)
+ jnz 1f /* lock was already held, go spin */
+ ret
+1:
+ xorl %eax, %eax /* 0 = the value of a free lock */
+ movl $(MP_SPIN_TRIES), %edx
+2: /* spin for a while before relinquishing */
+ pause /* spin-wait hint to the CPU */
+ cmpl %eax, (%r8) /* read-only test: does the lock look free? */
+ jz 0b /* yes, retry the atomic cmpxchg */
+ decl %edx
+ jnz 2b
+ /* failed to get the lock after spinning, so relinquish */
+ xorl %edi,%edi /* THREAD_NULL */
+ movl $1,%esi /* SWITCH_OPTION_DEPRESS */
+ movl $1,%edx /* 1 ms */
+ movl $(SYSCALL_CONSTRUCT_MACH(61)),%eax /* 61 = thread_switch */
+ syscall
+ jmp 0b
+COMMPAGE_DESCRIPTOR(spin_lock_mp_64,_COMM_PAGE_SPINLOCK_LOCK,0,kUP)
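+/*
+ * The MP version is a test-and-test-and-set loop: after a failed locked
+ * cmpxchg it spins read-only on the line (cmpl + pause), retrying the
+ * atomic only once the lock looks free, which avoids bouncing the cache
+ * line between CPUs while the lock is held. If the lock is still held
+ * after MP_SPIN_TRIES iterations, presumably the holder has been
+ * preempted, so we depress priority for 1 ms and start over.
+ */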
+
+
+COMMPAGE_FUNCTION_START(spin_unlock_64, 64, 4)
+ movl $0, (%rdi) /* a plain store suffices to release the lock */
+ ret
+COMMPAGE_DESCRIPTOR(spin_unlock_64,_COMM_PAGE_SPINLOCK_UNLOCK,0,0)
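+/*
+ * No fence or locked instruction is needed on the release path: x86
+ * stores are not reordered with earlier loads or stores, so the plain
+ * movl already has release semantics. A hedged C equivalent:
+ *
+ *	__atomic_store_n(lock, 0, __ATOMIC_RELEASE);	// plain store on x86
+ */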