+NONLEAF_ENTRY(lck_mtx_assert)
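+ /* %rdi = mutex address, %rsi = assertion type (compared against MUTEX_ASSERT_OWNED below) */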
+ mov %rdi, %rdx /* Load lock address */
+ mov %gs:CPU_ACTIVE_THREAD, %rax /* Load current thread */
+
+ mov M_STATE(%rdx), %ecx
+ cmp $(MUTEX_IND), %ecx /* Is this an indirect mutex? */
+ jne 0f
+ mov M_PTR(%rdx), %rdx /* If so, take indirection */
+0:
+ mov M_OWNER(%rdx), %rcx /* Load owner */
+ cmp $(MUTEX_ASSERT_OWNED), %rsi
+ jne 2f /* Assert ownership? */
+ cmp %rax, %rcx /* Current thread match? */
+ jne 3f /* no, go panic */
+ testl $(M_ILOCKED_MSK | M_MLOCKED_MSK), M_STATE(%rdx)
+ je 3f
+1: /* assertion holds, return to caller */
+ NONLEAF_RET
+2: /* assert not-owned: panic if the current thread holds the lock */
+ cmp %rax, %rcx /* Current thread match? */
+ jne 1b /* No, return */
+ ALIGN_STACK()
+ LOAD_PTR_ARG1(%rdx)
+ LOAD_STRING_ARG0(mutex_assert_owned_str)
+ jmp 4f /* join common panic path */
+3: /* assert-owned failed: current thread does not hold the lock */
+ ALIGN_STACK()
+ LOAD_PTR_ARG1(%rdx)
+ LOAD_STRING_ARG0(mutex_assert_not_owned_str)
+4: /* common panic path, panic arguments already loaded */
+ CALL_PANIC()
+
+
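+/*
+ * Common panic path for operations on a destroyed mutex; callers jump here
+ * with the mutex address in %rdx.
+ */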
+lck_mtx_destroyed:
+ ALIGN_STACK()
+ LOAD_PTR_ARG1(%rdx)
+ LOAD_STRING_ARG0(mutex_interlock_destroyed_str)
+ CALL_PANIC()
+