+/*
+ * unsigned int hw_lock_to(hw_lock_t, unsigned int)
+ *
+ * Acquire the lock, spinning until it becomes available or the
+ * timeout (in TSC cycles) expires: returns 1 if acquired, 0 on timeout.
+ * MACH_RT: also return with preemption disabled.
+ */
+LEAF_ENTRY(hw_lock_to)
+1:
+ movl L_ARG0,%edx /* fetch lock pointer */
+ movl %gs:CPU_ACTIVE_THREAD,%ecx
+ /*
+ * Attempt to grab the lock immediately
+ * - fastpath without timeout nonsense.
+ */
+ DISABLE_PREEMPTION
+ movl 0(%edx), %eax
+ testl %eax,%eax /* lock locked? */
+ jne 2f /* branch if so */
+ lock; cmpxchgl %ecx,0(%edx) /* try to acquire the HW lock */
+ jne 2f /* branch on failure */
+ movl $1,%eax /* return 1: lock acquired */
+ LEAF_RET
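+ /*
+ * For reference, the fastpath above is roughly this C sketch
+ * (names are illustrative; the hw_lock_t word is assumed to be
+ * 0 when free and the owner's thread pointer when held):
+ *
+ *	disable_preemption();
+ *	if (*lock == 0 && cmpxchg(lock, 0, current_thread()))
+ *		return 1;		// acquired on the fastpath
+ */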
+
+2:
+#define INNER_LOOP_COUNT 1000
+ /*
+ * Failed to get the lock, so compute the timeout expiry and then
+ * spin re-checking the lock, breaking out of the inner loop every
+ * INNER_LOOP_COUNT spins to check for timeout.
+ */
+ movl L_ARG1,%ecx /* fetch timeout */
+ push %edi
+ push %ebx
+ mov %edx,%edi
+
+ rdtsc /* read cyclecount into %edx:%eax */
+ addl %ecx,%eax /* add timeout to low 32 bits */
+ adcl $0,%edx /* add carry */
+ mov %edx,%ecx
+ mov %eax,%ebx /* %ecx:%ebx is the timeout expiry */
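+ /*
+ * In effect (a sketch; rdtsc64() stands in for reading the
+ * 64-bit time-stamp counter):
+ *
+ *	uint64_t deadline = rdtsc64() + timeout;
+ *
+ * The sum is formed from 32-bit halves with add/adc because this
+ * is 32-bit code; the expiry lives in %ecx:%ebx across the loop.
+ */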
+4:
+ /*
+ * Inner loop: spin watching for the lock to be freed.
+ */
+ mov $(INNER_LOOP_COUNT),%edx
+5:
+ PAUSE /* pause for hyper-threading */
+ movl 0(%edi),%eax /* spin checking lock value in cache */
+ testl %eax,%eax
+ je 6f /* zero => unlocked, try to grab it */
+ decl %edx /* decrement inner loop count */
+ jnz 5b /* inner count remaining => spin again */
+
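+ /*
+ * The inner loop above spins read-only on the lock word (so it
+ * stays in the local cache rather than bouncing the line) and
+ * issues PAUSE each iteration as a spin-wait hint to the core.
+ * Roughly, in C (illustrative names):
+ *
+ *	for (i = INNER_LOOP_COUNT; i > 0; i--) {
+ *		cpu_pause();			// the PAUSE hint
+ *		if (*lock == 0)
+ *			goto try_acquire;	// label 6 below
+ *	}
+ *	// fall through to the timeout check
+ */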
+ /*
+ * Here after spinning INNER_LOOP_COUNT times: check for timeout
+ * with an unsigned 64-bit compare of the current cyclecount
+ * (%edx:%eax) against the expiry (%ecx:%ebx). The high words
+ * must decide in both directions before the low words break the
+ * tie; otherwise a spin could outlive the timeout by up to a
+ * full wrap of the low word.
+ */
+ rdtsc /* cyclecount into %edx:%eax */
+ cmpl %ecx,%edx /* compare high-order 32 bits */
+ jb 4b /* high below expiry => keep spinning */
+ ja 7f /* high above expiry => timed out */
+ cmpl %ebx,%eax /* high words equal: compare low-order */
+ jb 4b /* low below expiry => keep spinning */
+7:
+ xor %eax,%eax /* timed out: return 0 */
+ pop %ebx
+ pop %edi
+ LEAF_RET
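+ /*
+ * The two-compare sequence above is the standard way to order
+ * 64-bit unsigned values using 32-bit registers; in C terms:
+ *
+ *	if (now_hi < end_hi ||
+ *	    (now_hi == end_hi && now_lo < end_lo))
+ *		keep_spinning();	// back to label 4
+ *	else
+ *		return 0;		// timed out
+ */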
+
+6:
+ /*
+ * Here to try to grab the lock that now appears to be free
+ * after contention.
+ */
+ movl %gs:CPU_ACTIVE_THREAD,%edx
+ lock; cmpxchgl %edx,0(%edi) /* try to acquire (%eax is 0 from the spin) */
+ jne 4b /* lost the race - resume spinning */
+ movl $1,%eax /* acquired - return 1 */
+ pop %ebx
+ pop %edi
+ LEAF_RET
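+ /*
+ * Putting it together, hw_lock_to() behaves like this C sketch
+ * (illustrative names; the timeout argument is in TSC cycles, and
+ * as written above preemption is left disabled even on timeout):
+ *
+ *	unsigned int
+ *	hw_lock_to(hw_lock_t lock, unsigned int timeout)
+ *	{
+ *		disable_preemption();
+ *		if (*lock == 0 && cmpxchg(lock, 0, current_thread()))
+ *			return 1;
+ *		uint64_t deadline = rdtsc64() + timeout;
+ *		do {
+ *			for (int i = 0; i < INNER_LOOP_COUNT; i++) {
+ *				cpu_pause();
+ *				if (*lock == 0 &&
+ *				    cmpxchg(lock, 0, current_thread()))
+ *					return 1;
+ *			}
+ *		} while (rdtsc64() < deadline);
+ *		return 0;
+ *	}
+ */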