diff --git a/osfmk/i386/commpage/spinlocks.s b/osfmk/i386/commpage/spinlocks.s
index e69f875bd4a23d75b29469f6930cf0279fd3d4b5..a0e98bcb357a83e8c3d18ff9908c9de472fbbe72 100644
--- a/osfmk/i386/commpage/spinlocks.s
+++ b/osfmk/i386/commpage/spinlocks.s
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003 Apple Computer, Inc. All rights reserved.
+ * Copyright (c) 2003-2009 Apple, Inc. All rights reserved.
  *
  * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
  * 
 #include <sys/appleapiopts.h>
 #include <machine/cpu_capabilities.h>
 #include <machine/commpage.h>
+#include <mach/i386/syscall_sw.h>
+       
 
-/*
- * We need a relative branch within the comm page, and don't want the linker
- * to relocate it, so we have to hand-code the instructions. LEN is to account
- * for the length of a .long, since the jmp is relative to the next instruction.
- */
-
-#define JNZ .byte 0x0f, 0x85; .long
-#define JMP .byte 0xe9; .long
-#define LEN 4 
-
-/*
- * Branch prediction prefixes
- */
-
-#define LIKELY         .byte 0x3e
-#define UNLIKELY       .byte 0x2e
-
-#define MP_SPIN_TRIES  1024
-
-       .text
-       .align 4, 0x90
-
-Lspin_lock_try_up:
+COMMPAGE_FUNCTION_START(spin_lock_try_up, 32, 4)
        movl            4(%esp), %ecx 
        xorl            %eax, %eax
        orl             $-1, %edx
@@ -60,11 +40,10 @@ Lspin_lock_try_up:
        setz            %dl
        movzbl          %dl, %eax
        ret
-
-       COMMPAGE_DESCRIPTOR(spin_lock_try_up,_COMM_PAGE_SPINLOCK_TRY,kUP,0)
+COMMPAGE_DESCRIPTOR(spin_lock_try_up,_COMM_PAGE_SPINLOCK_TRY,kUP,0)
  
-       .align 4, 0x90
-Lspin_lock_try_mp:
+
+COMMPAGE_FUNCTION_START(spin_lock_try_mp, 32, 4)
        movl            4(%esp), %ecx 
        xorl            %eax, %eax
        orl             $-1, %edx
@@ -73,33 +52,36 @@ Lspin_lock_try_mp:
        setz            %dl
        movzbl          %dl, %eax
        ret
+COMMPAGE_DESCRIPTOR(spin_lock_try_mp,_COMM_PAGE_SPINLOCK_TRY,0,kUP)
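
A rough C-level illustration (not the shipped code, which is the hand-written commpage assembly above) of what both spin_lock_try variants do: a single compare-and-swap of the 32-bit lock word from 0 to -1, returning 1 on success and 0 on failure. The UP flavor omits the LOCK prefix because a uniprocessor has no other CPU to race with; the compiler builtin used below always emits a locked CMPXCHG, so it corresponds to the MP flavor.

    #include <stdbool.h>
    #include <stdint.h>

    typedef volatile int32_t commpage_spinlock_t;   /* 0 = free, -1 = held */

    /* One CAS attempt, no spinning, mirroring spin_lock_try_{up,mp}. */
    static bool
    spin_lock_try_sketch(commpage_spinlock_t *lock)
    {
        return __sync_bool_compare_and_swap(lock, 0, -1);
    }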
 
-       COMMPAGE_DESCRIPTOR(spin_lock_try_mp,_COMM_PAGE_SPINLOCK_TRY,0,kUP)
 
-.set Lrelinquish_off,  _COMM_PAGE_RELINQUISH - _COMM_PAGE_SPINLOCK_LOCK
-
-       .align 4, 0x90
-Lspin_lock_up:
+COMMPAGE_FUNCTION_START(spin_lock_up, 32, 4)
        movl            4(%esp), %ecx
        xorl            %eax, %eax
-.set Lretry,           . - Lspin_lock_up
        orl             $-1, %edx
        cmpxchgl        %edx, (%ecx)
-       UNLIKELY
-       JNZ             Lrelinquish_off - . + Lspin_lock_up - LEN
+       jnz             1f
        ret
+1:
+       /* failed to get lock so relinquish the processor immediately on UP */
+       pushl           $1              /* 1 ms                         */
+       pushl           $1              /* SWITCH_OPTION_DEPRESS        */
+       pushl           $0              /* THREAD_NULL                  */
+       pushl           $0              /* push dummy stack ret addr    */
+       movl            $-61,%eax       /* SYSCALL_THREAD_SWITCH */
+       int             $(MACH_INT)
+       addl            $16, %esp       /* adjust stack*/
+       jmp             Lspin_lock_up
+COMMPAGE_DESCRIPTOR(spin_lock_up,_COMM_PAGE_SPINLOCK_LOCK,kUP,0)
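
The UP lock above never spins: if the CAS fails on a uniprocessor the lock holder cannot be running right now, so the code immediately enters the kernel through the Mach trap vector (int $(MACH_INT), replacing the old lcall $7,$0 call gate removed further down) with trap -61, thread_switch, requesting a 1 ms priority depression, and then retries. An illustrative user-level C equivalent, assuming the thread_switch() prototype from <mach/mach_traps.h>, SWITCH_OPTION_DEPRESS from <mach/thread_switch.h>, and commpage_spinlock_t from the sketch above:

    #include <mach/mach.h>            /* MACH_PORT_NULL, kern_return_t */
    #include <mach/mach_traps.h>      /* user-space thread_switch() prototype */
    #include <mach/thread_switch.h>   /* SWITCH_OPTION_DEPRESS */

    /* Sketch of spin_lock_up: try the CAS; on failure give up the CPU for
     * 1 ms with a priority depression (Mach trap 61), then try again. */
    static void
    spin_lock_up_sketch(commpage_spinlock_t *lock)
    {
        while (!__sync_bool_compare_and_swap(lock, 0, -1))
            (void)thread_switch(MACH_PORT_NULL, SWITCH_OPTION_DEPRESS, 1 /* ms */);
    }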
 
-       COMMPAGE_DESCRIPTOR(spin_lock_up,_COMM_PAGE_SPINLOCK_LOCK,kUP,0)
 
-       .align 4, 0x90
-Lspin_lock_mp:
+COMMPAGE_FUNCTION_START(spin_lock_mp, 32, 4)
        movl            4(%esp), %ecx
        xorl            %eax, %eax
 0:
        orl             $-1, %edx
        lock
        cmpxchgl        %edx, (%ecx)
-       UNLIKELY
        jnz             1f
        ret
 1:
@@ -108,33 +90,100 @@ Lspin_lock_mp:
 2:
        pause   
        cmpl            %eax, (%ecx)
-       LIKELY
-       jz              0b
+       jz              0b              /* favor success and slow down spin loop */
        decl            %edx
-       LIKELY
        jnz             2b
-       JMP             Lrelinquish_off - . + Lspin_lock_mp - LEN
-       COMMPAGE_DESCRIPTOR(spin_lock_mp,_COMM_PAGE_SPINLOCK_LOCK,0,kUP)
+       /* failed to get lock after spinning so relinquish  */
+       pushl           $1              /* 1 ms                         */
+       pushl           $1              /* SWITCH_OPTION_DEPRESS        */
+       pushl           $0              /* THREAD_NULL                  */
+       pushl           $0              /* push dummy stack ret addr    */
+       movl            $-61,%eax       /* SYSCALL_THREAD_SWITCH */
+       int             $(MACH_INT)
+       addl            $16, %esp       /* adjust stack*/
+       jmp             Lspin_lock_mp
+COMMPAGE_DESCRIPTOR(spin_lock_mp,_COMM_PAGE_SPINLOCK_LOCK,0,kUP)
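
The MP lock adds a bounded busy-wait before giving up the CPU: after a failed locked CAS it PAUSEs while watching the lock word, retries the CAS as soon as the word reads 0, and only after MP_SPIN_TRIES (1024) iterations falls back to the same 1 ms thread_switch depression. A C sketch of that policy, reusing the types and headers from the sketches above:

    #define MP_SPIN_TRIES_SKETCH 1024          /* mirrors MP_SPIN_TRIES in this file */

    /* Sketch of spin_lock_mp: locked CAS, bounded PAUSE spin, then yield. */
    static void
    spin_lock_mp_sketch(commpage_spinlock_t *lock)
    {
        for (;;) {
            if (__sync_bool_compare_and_swap(lock, 0, -1))
                return;                        /* got the lock */

            /* Spin politely while the lock still reads as held. */
            int tries = MP_SPIN_TRIES_SKETCH;
            while (*lock != 0 && --tries != 0)
                __asm__ __volatile__("pause");

            /* Spin budget exhausted and the lock is still held: yield 1 ms. */
            if (*lock != 0)
                (void)thread_switch(MACH_PORT_NULL, SWITCH_OPTION_DEPRESS, 1);
        }
    }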
 
-       .align 4, 0x90
-Lspin_unlock:
+
+COMMPAGE_FUNCTION_START(spin_unlock, 32, 4)
        movl            4(%esp), %ecx
        movl            $0, (%ecx)
        ret
+COMMPAGE_DESCRIPTOR(spin_unlock,_COMM_PAGE_SPINLOCK_UNLOCK,0,0)
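
Unlock is a single plain store of 0 in both the 32- and 64-bit versions: x86's total-store-order rules already prevent earlier writes from drifting past the store that releases the lock, so no LOCK prefix or fence is needed. A C rendering would still use an explicit release store so the compiler cannot reorder around it:

    /* Sketch of spin_unlock: release-ordered store of 0 (the assembly can use
     * a plain movl because x86 stores are not reordered with earlier stores,
     * and no compiler is involved there). */
    static void
    spin_unlock_sketch(commpage_spinlock_t *lock)
    {
        __atomic_store_n(lock, 0, __ATOMIC_RELEASE);
    }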
 
-       COMMPAGE_DESCRIPTOR(spin_unlock,_COMM_PAGE_SPINLOCK_UNLOCK,0,0)
 
-       .align 4, 0x90
-Lrelinquish:                           /* relinquish the processor     */
-       pushl           $1              /* 1 ms                         */
-       pushl           $1              /* SWITCH_OPTION_DEPRESS        */
-       pushl           $0              /* THREAD_NULL                  */
-       pushl           $0              /* push dummy stack ret addr    */
-       movl            $-61, %eax      /* syscall_thread_switch        */
-       lcall           $7, $0
-       addl            $16, %esp       /* adjust stack*/
-       xorl            %eax, %eax      /* set %eax to 0 again          */
-       JMP             Lretry - Lrelinquish_off - . + Lrelinquish - LEN
+/* ============================ 64-bit versions follow ===================== */
 
-       COMMPAGE_DESCRIPTOR(relinquish,_COMM_PAGE_RELINQUISH,0,0)
+
+COMMPAGE_FUNCTION_START(spin_lock_try_up_64, 64, 4)
+       xorl            %eax, %eax
+       orl             $-1, %edx
+       cmpxchgl        %edx, (%rdi)
+       setz            %dl
+       movzbl          %dl, %eax
+       ret
+COMMPAGE_DESCRIPTOR(spin_lock_try_up_64,_COMM_PAGE_SPINLOCK_TRY,kUP,0)
+
+
+COMMPAGE_FUNCTION_START(spin_lock_try_mp_64, 64, 4)
+       xorl            %eax, %eax
+       orl             $-1, %edx
+       lock
+       cmpxchgl        %edx, (%rdi)
+       setz            %dl
+       movzbl          %dl, %eax
+       ret
+COMMPAGE_DESCRIPTOR(spin_lock_try_mp_64,_COMM_PAGE_SPINLOCK_TRY,0,kUP)
+
+
+COMMPAGE_FUNCTION_START(spin_lock_up_64, 64, 4)
+       movq            %rdi,%r8
+0:
+       xorl            %eax, %eax
+       orl             $-1, %edx
+       cmpxchgl        %edx, (%r8)
+       jnz             1f
+       ret
+1:
+       /* failed to get lock so relinquish the processor immediately on UP */
+       xorl            %edi,%edi       /* THREAD_NULL                  */
+       movl            $1,%esi         /* SWITCH_OPTION_DEPRESS        */
+       movl            $1,%edx         /* 1 ms                         */
+       movl            $(SYSCALL_CONSTRUCT_MACH(61)),%eax      /* 61 = thread_switch */
+       syscall
+       jmp             0b
+COMMPAGE_DESCRIPTOR(spin_lock_up_64,_COMM_PAGE_SPINLOCK_LOCK,kUP,0)
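
The 64-bit variants take the lock address in %rdi per the SysV AMD64 ABI and enter the kernel with the syscall instruction instead of int $(MACH_INT). The trap number is no longer negated; it is tagged with a syscall-class field by SYSCALL_CONSTRUCT_MACH() from <mach/i386/syscall_sw.h>, which is defined roughly as sketched below (values as recalled here; the header is authoritative):

    /* Approximate reconstruction of the Mach syscall-number encoding used by
     * the 64-bit path; the SK_ names are local to this sketch. */
    #define SK_SYSCALL_CLASS_SHIFT   24
    #define SK_SYSCALL_CLASS_MASK    (0xFFu << SK_SYSCALL_CLASS_SHIFT)
    #define SK_SYSCALL_NUMBER_MASK   (~SK_SYSCALL_CLASS_MASK)
    #define SK_SYSCALL_CLASS_MACH    1u
    #define SK_SYSCALL_CONSTRUCT_MACH(n) \
            ((SK_SYSCALL_CLASS_MACH << SK_SYSCALL_CLASS_SHIFT) | \
             (SK_SYSCALL_NUMBER_MASK & (n)))

    /* thread_switch is Mach trap 61, so the 64-bit code loads 0x0100003d
     * into %eax where the 32-bit code loads -61 and issues int $(MACH_INT). */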
+       
+       
+COMMPAGE_FUNCTION_START(spin_lock_mp_64, 64, 4)
+       movq            %rdi,%r8
+0:
+       xorl            %eax, %eax
+       orl             $-1, %edx
+       lock
+       cmpxchgl        %edx, (%r8)
+       jnz             1f
+       ret
+1:
+       xorl            %eax, %eax
+       movl            $(MP_SPIN_TRIES), %edx
+2:                                     /* spin for awhile before relinquish */
+       pause   
+       cmpl            %eax, (%r8)
+       jz              0b
+       decl            %edx
+       jnz             2b
+       /* failed to get lock after spinning so relinquish  */
+       xorl            %edi,%edi       /* THREAD_NULL                  */
+       movl            $1,%esi         /* SWITCH_OPTION_DEPRESS        */
+       movl            $1,%edx         /* 1 ms                         */
+       movl            $(SYSCALL_CONSTRUCT_MACH(61)),%eax      /* 61 = thread_switch */
+       syscall
+       jmp             0b
+COMMPAGE_DESCRIPTOR(spin_lock_mp_64,_COMM_PAGE_SPINLOCK_LOCK,0,kUP)
+
+
+COMMPAGE_FUNCTION_START(spin_unlock_64, 64, 4)
+       movl            $0, (%rdi)
+       ret
+COMMPAGE_DESCRIPTOR(spin_unlock_64,_COMM_PAGE_SPINLOCK_UNLOCK,0,0)
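
Purely as a usage illustration of the interface these routines implement (the lock word's address is the single argument, 0 means unlocked), the toy program below exercises the C sketches defined earlier on this page; it does not call the commpage entry points themselves, which userland reaches through library wrappers rather than directly.

    #include <pthread.h>
    #include <stdio.h>

    /* Uses commpage_spinlock_t, spin_lock_mp_sketch() and spin_unlock_sketch()
     * from the sketches above. */
    static commpage_spinlock_t g_lock = 0;     /* 0 = unlocked */
    static long g_counter = 0;

    static void *
    worker(void *arg)
    {
        (void)arg;
        for (int i = 0; i < 100000; i++) {
            spin_lock_mp_sketch(&g_lock);
            g_counter++;
            spin_unlock_sketch(&g_lock);
        }
        return NULL;
    }

    int
    main(void)
    {
        pthread_t t[4];
        for (int i = 0; i < 4; i++)
            pthread_create(&t[i], NULL, worker, NULL);
        for (int i = 0; i < 4; i++)
            pthread_join(t[i], NULL);
        printf("counter = %ld (expected 400000)\n", g_counter);
        return 0;
    }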