diff --git a/osfmk/i386/commpage/spinlocks.s b/osfmk/i386/commpage/spinlocks.s
index f8d6329fa575d6066179a300bc633ecb1ed2fee3..e69f875bd4a23d75b29469f6930cf0279fd3d4b5 100644
--- a/osfmk/i386/commpage/spinlocks.s
+++ b/osfmk/i386/commpage/spinlocks.s
@@ -1,31 +1,51 @@
 /*
- * Copyright (c) 2003-2006 Apple Computer, Inc. All rights reserved.
+ * Copyright (c) 2003 Apple Computer, Inc. All rights reserved.
  *
- * @APPLE_LICENSE_HEADER_START@
+ * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
  * 
- * The contents of this file constitute Original Code as defined in and
- * are subject to the Apple Public Source License Version 1.1 (the
- * "License").  You may not use this file except in compliance with the
- * License.  Please obtain a copy of the License at
- * http://www.apple.com/publicsource and read it before using this file.
+ * This file contains Original Code and/or Modifications of Original Code
+ * as defined in and that are subject to the Apple Public Source License
+ * Version 2.0 (the 'License'). You may not use this file except in
+ * compliance with the License. The rights granted to you under the License
+ * may not be used to create, or enable the creation or redistribution of,
+ * unlawful or unlicensed copies of an Apple operating system, or to
+ * circumvent, violate, or enable the circumvention or violation of, any
+ * terms of an Apple operating system software license agreement.
  * 
- * This Original Code and all software distributed under the License are
- * distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, EITHER
+ * Please obtain a copy of the License at
+ * http://www.opensource.apple.com/apsl/ and read it before using this file.
+ * 
+ * The Original Code and all software distributed under the License are
+ * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
  * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
  * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT.  Please see the
- * License for the specific language governing rights and limitations
- * under the License.
+ * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
+ * Please see the License for the specific language governing rights and
+ * limitations under the License.
  * 
- * @APPLE_LICENSE_HEADER_END@
+ * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
  */
 
 #include <sys/appleapiopts.h>
 #include <machine/cpu_capabilities.h>
 #include <machine/commpage.h>
-#include <mach/i386/syscall_sw.h>
-       
 
+/*
+ * We need a relative branch within the comm page, and don't want the linker
+ * to relocate it, so we have to hand-code the instructions. LEN is to account
+ * for the length of a .long, since the jmp is relative to the next instruction.
+ */
+
+#define JNZ .byte 0x0f, 0x85; .long
+#define JMP .byte 0xe9; .long
+#define LEN 4 
+
+/*
+ * Branch prediction prefixes: 0x3e is the ds segment-override prefix,
+ * used as a static "branch taken" hint; 0x2e is the cs override, used
+ * as a "branch not taken" hint.
+ */
+
+#define LIKELY         .byte 0x3e
+#define UNLIKELY       .byte 0x2e
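+
+/*
+ * Worked example (not part of the routines themselves): in Lspin_lock_up
+ * below, the pair
+ *
+ *	UNLIKELY
+ *	JNZ	Lrelinquish_off - . + Lspin_lock_up - LEN
+ *
+ * emits the bytes 2e 0f 85 followed by a 4-byte displacement.  That
+ * displacement is measured from the end of the instruction, i.e. from the
+ * address of the .long plus LEN, so the branch target resolves to
+ *
+ *	(. + LEN) + (Lrelinquish_off - . + Lspin_lock_up - LEN)
+ *		= Lspin_lock_up + Lrelinquish_off
+ *
+ * which is _COMM_PAGE_RELINQUISH once the routines are copied to their
+ * fixed comm page addresses, with no relocation required.
+ */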
 
 #define MP_SPIN_TRIES  1024
 
@@ -43,7 +63,6 @@ Lspin_lock_try_up:
 
        COMMPAGE_DESCRIPTOR(spin_lock_try_up,_COMM_PAGE_SPINLOCK_TRY,kUP,0)
  
        .align 4, 0x90
 Lspin_lock_try_mp:
        movl            4(%esp), %ecx 
@@ -57,29 +76,21 @@ Lspin_lock_try_mp:
 
        COMMPAGE_DESCRIPTOR(spin_lock_try_mp,_COMM_PAGE_SPINLOCK_TRY,0,kUP)
 
+.set Lrelinquish_off,  _COMM_PAGE_RELINQUISH - _COMM_PAGE_SPINLOCK_LOCK
 
        .align 4, 0x90
 Lspin_lock_up:
        movl            4(%esp), %ecx
        xorl            %eax, %eax
+.set Lretry,           . - Lspin_lock_up
        orl             $-1, %edx
        cmpxchgl        %edx, (%ecx)
-       jnz,pn          1f              /* predict not taken */
+       UNLIKELY
+       JNZ             Lrelinquish_off - . + Lspin_lock_up - LEN
        ret
-1:
-       /* failed to get lock so relinquish the processor immediately on UP */
-       pushl           $1              /* 1 ms                         */
-       pushl           $1              /* SWITCH_OPTION_DEPRESS        */
-       pushl           $0              /* THREAD_NULL                  */
-       pushl           $0              /* push dummy stack ret addr    */
-       movl            $-61,%eax       /* SYSCALL_THREAD_SWITCH */
-       int             $(MACH_INT)
-       addl            $16, %esp       /* adjust stack*/
-       jmp             Lspin_lock_up
 
        COMMPAGE_DESCRIPTOR(spin_lock_up,_COMM_PAGE_SPINLOCK_LOCK,kUP,0)
 
-
        .align 4, 0x90
 Lspin_lock_mp:
        movl            4(%esp), %ecx
@@ -88,7 +99,8 @@ Lspin_lock_mp:
        orl             $-1, %edx
        lock
        cmpxchgl        %edx, (%ecx)
-       jnz,pn          1f              /* predict not taken */
+       UNLIKELY
+       jnz             1f
        ret
 1:
        xorl            %eax, %eax
@@ -96,22 +108,15 @@ Lspin_lock_mp:
 2:
        pause   
        cmpl            %eax, (%ecx)
-       jz,pt           0b              /* favor success and slow down spin loop */
+       LIKELY
+       jz              0b
        decl            %edx
-       jnz,pn          2b              /* slow down spin loop with a mispredict */
-       /* failed to get lock after spinning so relinquish  */
-       pushl           $1              /* 1 ms                         */
-       pushl           $1              /* SWITCH_OPTION_DEPRESS        */
-       pushl           $0              /* THREAD_NULL                  */
-       pushl           $0              /* push dummy stack ret addr    */
-       movl            $-61,%eax       /* SYSCALL_THREAD_SWITCH */
-       int             $(MACH_INT)
-       addl            $16, %esp       /* adjust stack*/
-       jmp             Lspin_lock_mp
+       LIKELY
+       jnz             2b
+       JMP             Lrelinquish_off - . + Lspin_lock_mp - LEN
  
        COMMPAGE_DESCRIPTOR(spin_lock_mp,_COMM_PAGE_SPINLOCK_LOCK,0,kUP)
 
-
        .align 4, 0x90
 Lspin_unlock:
        movl            4(%esp), %ecx
@@ -120,93 +125,16 @@ Lspin_unlock:
 
        COMMPAGE_DESCRIPTOR(spin_unlock,_COMM_PAGE_SPINLOCK_UNLOCK,0,0)
 
-
-/* ============================ 64-bit versions follow ===================== */
-
-
-       .text
-       .code64
        .align 4, 0x90
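+/*
+ * Common relinquish path, shared by both spin_lock variants above: the UP
+ * lock branches here as soon as its cmpxchg fails, the MP lock only after
+ * spinning MP_SPIN_TRIES times.  It depresses the thread's priority for
+ * 1 ms via the thread_switch trap (Mach trap -61, entered through the
+ * lcall $7,$0 gate), then jumps back to the retry point (Lretry) inside
+ * the lock routine at _COMM_PAGE_SPINLOCK_LOCK.
+ */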
+Lrelinquish:                           /* relinquish the processor     */
+       pushl           $1              /* 1 ms                         */
+       pushl           $1              /* SWITCH_OPTION_DEPRESS        */
+       pushl           $0              /* THREAD_NULL                  */
+       pushl           $0              /* push dummy stack ret addr    */
+       movl            $-61, %eax      /* syscall_thread_switch        */
+       lcall           $7, $0
+       addl            $16, %esp       /* adjust stack */
+       xorl            %eax, %eax      /* set %eax to 0 again          */
+       JMP             Lretry - Lrelinquish_off - . + Lrelinquish - LEN
 
-Lspin_lock_try_up_64:
-       xorl            %eax, %eax
-       orl             $-1, %edx
-       cmpxchgl        %edx, (%rdi)
-       setz            %dl
-       movzbl          %dl, %eax
-       ret
-
-       COMMPAGE_DESCRIPTOR(spin_lock_try_up_64,_COMM_PAGE_SPINLOCK_TRY,kUP,0)
-
-
-       .align 4, 0x90
-Lspin_lock_try_mp_64:
-       xorl            %eax, %eax
-       orl             $-1, %edx
-       lock
-       cmpxchgl        %edx, (%rdi)
-       setz            %dl
-       movzbl          %dl, %eax
-       ret
-
-       COMMPAGE_DESCRIPTOR(spin_lock_try_mp_64,_COMM_PAGE_SPINLOCK_TRY,0,kUP)
-
-
-       .align 4, 0x90
-Lspin_lock_up_64:
-       movq            %rdi,%r8
-0:
-       xorl            %eax, %eax
-       orl             $-1, %edx
-       cmpxchgl        %edx, (%r8)
-       jnz,pn          1f              /* predict not taken */
-       ret
-1:
-       /* failed to get lock so relinquish the processor immediately on UP */
-       xorl            %edi,%edi       /* THREAD_NULL                  */
-       movl            $1,%esi         /* SWITCH_OPTION_DEPRESS        */
-       movl            $1,%edx         /* 1 ms                         */
-       movl            $(SYSCALL_CONSTRUCT_MACH(61)),%eax      /* 61 = thread_switch */
-       syscall
-       jmp             0b
-
-       COMMPAGE_DESCRIPTOR(spin_lock_up_64,_COMM_PAGE_SPINLOCK_LOCK,kUP,0)
-       
-       
-
-       .align 4, 0x90
-Lspin_lock_mp_64:
-       movq            %rdi,%r8
-0:
-       xorl            %eax, %eax
-       orl             $-1, %edx
-       lock
-       cmpxchgl        %edx, (%r8)
-       jnz,pn          1f              /* predict not taken */
-       ret
-1:
-       xorl            %eax, %eax
-       movl            $(MP_SPIN_TRIES), %edx
-2:                                     /* spin for awhile before relinquish */
-       pause   
-       cmpl            %eax, (%r8)
-       jz              0b
-       decl            %edx
-       jnz             2b
-       /* failed to get lock after spinning so relinquish  */
-       xorl            %edi,%edi       /* THREAD_NULL                  */
-       movl            $1,%esi         /* SWITCH_OPTION_DEPRESS        */
-       movl            $1,%edx         /* 1 ms                         */
-       movl            $(SYSCALL_CONSTRUCT_MACH(61)),%eax      /* 61 = thread_switch */
-       syscall
-       jmp             0b
-       COMMPAGE_DESCRIPTOR(spin_lock_mp_64,_COMM_PAGE_SPINLOCK_LOCK,0,kUP)
-
-
-       .align 4, 0x90
-Lspin_unlock_64:
-       movl            $0, (%rdi)
-       ret
-
-       COMMPAGE_DESCRIPTOR(spin_unlock_64,_COMM_PAGE_SPINLOCK_UNLOCK,0,0)
+       COMMPAGE_DESCRIPTOR(relinquish,_COMM_PAGE_RELINQUISH,0,0)
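+
+/*
+ * Illustrative caller sketch (an assumption about the user-space side, not
+ * part of this file): the kernel copies one variant of each routine (the
+ * _up versions on uniprocessors, the _mp versions otherwise, per the kUP
+ * flags in the descriptors above) to the fixed addresses defined in
+ * cpu_capabilities.h.  A libc spin-lock wrapper would then reach the code
+ * roughly like this, passing the lock word's address on the stack:
+ *
+ *	pushl	%edi					# &lock word
+ *	movl	$(_COMM_PAGE_SPINLOCK_TRY), %eax
+ *	call	*%eax					# nonzero %eax => lock acquired
+ *	addl	$4, %esp
+ */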