X-Git-Url: https://git.saurik.com/apple/xnu.git/blobdiff_plain/8f6c56a50524aa785f7e596d52dddfb331e18961..4452a7af2eac33dbad800bcc91f2399d62c18f53:/osfmk/i386/commpage/spinlocks.s

diff --git a/osfmk/i386/commpage/spinlocks.s b/osfmk/i386/commpage/spinlocks.s
index e69f875bd..e582635a0 100644
--- a/osfmk/i386/commpage/spinlocks.s
+++ b/osfmk/i386/commpage/spinlocks.s
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003 Apple Computer, Inc. All rights reserved.
+ * Copyright (c) 2003-2006 Apple Computer, Inc. All rights reserved.
  *
  * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
  *
@@ -29,23 +29,9 @@
 #include <sys/appleapiopts.h>
 #include <machine/cpu_capabilities.h>
 #include <machine/commpage.h>
+#include <mach/i386/syscall_sw.h>
+
-/*
- * We need a relative branch within the comm page, and don't want the linker
- * to relocate it, so we have to hand-code the instructions. LEN is to account
- * for the length of a .long, since the jmp is relative to the next instruction.
- */
-
-#define	JNZ	.byte 0x0f, 0x85; .long
-#define	JMP	.byte 0xe9; .long
-#define	LEN	4
-
-/*
- * Branch prediction prefixes
- */
-
-#define	LIKELY		.byte 0x3e
-#define	UNLIKELY	.byte 0x2e
 
 #define	MP_SPIN_TRIES	1024
@@ -63,6 +49,7 @@ Lspin_lock_try_up:
 
 	COMMPAGE_DESCRIPTOR(spin_lock_try_up,_COMM_PAGE_SPINLOCK_TRY,kUP,0)
 
+	.align	4, 0x90
 Lspin_lock_try_mp:
 	movl	4(%esp), %ecx
@@ -76,21 +63,29 @@ Lspin_lock_try_mp:
 
 	COMMPAGE_DESCRIPTOR(spin_lock_try_mp,_COMM_PAGE_SPINLOCK_TRY,0,kUP)
 
-.set	Lrelinquish_off, _COMM_PAGE_RELINQUISH - _COMM_PAGE_SPINLOCK_LOCK
 
 	.align	4, 0x90
 Lspin_lock_up:
 	movl	4(%esp), %ecx
 	xorl	%eax, %eax
-.set	Lretry, . - Lspin_lock_up
 	orl	$-1, %edx
 	cmpxchgl %edx, (%ecx)
-	UNLIKELY
-	JNZ	Lrelinquish_off - . + Lspin_lock_up - LEN
+	jnz,pn	1f			/* predict not taken */
 	ret
+1:
+	/* failed to get lock so relinquish the processor immediately on UP */
+	pushl	$1			/* 1 ms				*/
+	pushl	$1			/* SWITCH_OPTION_DEPRESS	*/
+	pushl	$0			/* THREAD_NULL			*/
+	pushl	$0			/* push dummy stack ret addr	*/
+	movl	$-61,%eax		/* SYSCALL_THREAD_SWITCH	*/
+	int	$(MACH_INT)
+	addl	$16, %esp		/* adjust stack*/
+	jmp	Lspin_lock_up
 
 	COMMPAGE_DESCRIPTOR(spin_lock_up,_COMM_PAGE_SPINLOCK_LOCK,kUP,0)
 
+	.align	4, 0x90
 Lspin_lock_mp:
 	movl	4(%esp), %ecx
@@ -99,8 +94,7 @@ Lspin_lock_mp:
 	orl	$-1, %edx
 	lock
 	cmpxchgl %edx, (%ecx)
-	UNLIKELY
-	jnz	1f
+	jnz,pn	1f			/* predict not taken */
 	ret
 1:
 	xorl	%eax, %eax
@@ -108,15 +102,22 @@ Lspin_lock_mp:
 2:
 	pause
 	cmpl	%eax, (%ecx)
-	LIKELY
-	jz	0b
+	jz,pt	0b			/* favor success and slow down spin loop */
 	decl	%edx
-	LIKELY
-	jnz	2b
-	JMP	Lrelinquish_off - . + Lspin_lock_mp - LEN
+	jnz,pn	2b			/* slow down spin loop with a mispredict */
+	/* failed to get lock after spinning so relinquish */
+	pushl	$1			/* 1 ms				*/
+	pushl	$1			/* SWITCH_OPTION_DEPRESS	*/
+	pushl	$0			/* THREAD_NULL			*/
+	pushl	$0			/* push dummy stack ret addr	*/
+	movl	$-61,%eax		/* SYSCALL_THREAD_SWITCH	*/
+	int	$(MACH_INT)
+	addl	$16, %esp		/* adjust stack*/
+	jmp	Lspin_lock_mp
 
 	COMMPAGE_DESCRIPTOR(spin_lock_mp,_COMM_PAGE_SPINLOCK_LOCK,0,kUP)
 
+	.align	4, 0x90
 Lspin_unlock:
 	movl	4(%esp), %ecx
@@ -125,16 +126,93 @@ Lspin_unlock:
 
 	COMMPAGE_DESCRIPTOR(spin_unlock,_COMM_PAGE_SPINLOCK_UNLOCK,0,0)
 
+
+/* ============================ 64-bit versions follow ===================== */
+
+
+	.text
+	.code64
 	.align	4, 0x90
-Lrelinquish:				/* relinquish the processor	*/
-	pushl	$1			/* 1 ms				*/
-	pushl	$1			/* SWITCH_OPTION_DEPRESS	*/
-	pushl	$0			/* THREAD_NULL			*/
-	pushl	$0			/* push dummy stack ret addr	*/
-	movl	$-61, %eax		/* syscall_thread_switch	*/
-	lcall	$7, $0
-	addl	$16, %esp		/* adjust stack*/
-	xorl	%eax, %eax		/* set %eax to 0 again		*/
-	JMP	Lretry - Lrelinquish_off - . + Lrelinquish - LEN
-	COMMPAGE_DESCRIPTOR(relinquish,_COMM_PAGE_RELINQUISH,0,0)
+Lspin_lock_try_up_64:
+	xorl	%eax, %eax
+	orl	$-1, %edx
+	cmpxchgl %edx, (%rdi)
+	setz	%dl
+	movzbl	%dl, %eax
+	ret
+
+	COMMPAGE_DESCRIPTOR(spin_lock_try_up_64,_COMM_PAGE_SPINLOCK_TRY,kUP,0)
+
+
+	.align	4, 0x90
+Lspin_lock_try_mp_64:
+	xorl	%eax, %eax
+	orl	$-1, %edx
+	lock
+	cmpxchgl %edx, (%rdi)
+	setz	%dl
+	movzbl	%dl, %eax
+	ret
+
+	COMMPAGE_DESCRIPTOR(spin_lock_try_mp_64,_COMM_PAGE_SPINLOCK_TRY,0,kUP)
+
+
+	.align	4, 0x90
+Lspin_lock_up_64:
+	movq	%rdi,%r8
+0:
+	xorl	%eax, %eax
+	orl	$-1, %edx
+	cmpxchgl %edx, (%r8)
+	jnz,pn	1f			/* predict not taken */
+	ret
+1:
+	/* failed to get lock so relinquish the processor immediately on UP */
+	xorl	%edi,%edi		/* THREAD_NULL			*/
+	movl	$1,%esi			/* SWITCH_OPTION_DEPRESS	*/
+	movl	$1,%edx			/* 1 ms				*/
+	movl	$(SYSCALL_CONSTRUCT_MACH(61)),%eax	/* 61 = thread_switch */
+	syscall
+	jmp	0b
+
+	COMMPAGE_DESCRIPTOR(spin_lock_up_64,_COMM_PAGE_SPINLOCK_LOCK,kUP,0)
+
+
+
+	.align	4, 0x90
+Lspin_lock_mp_64:
+	movq	%rdi,%r8
+0:
+	xorl	%eax, %eax
+	orl	$-1, %edx
+	lock
+	cmpxchgl %edx, (%r8)
+	jnz,pn	1f			/* predict not taken */
+	ret
+1:
+	xorl	%eax, %eax
+	movl	$(MP_SPIN_TRIES), %edx
+2:					/* spin for awhile before relinquish */
+	pause
+	cmpl	%eax, (%r8)
+	jz	0b
+	decl	%edx
+	jnz	2b
+	/* failed to get lock after spinning so relinquish */
+	xorl	%edi,%edi		/* THREAD_NULL			*/
+	movl	$1,%esi			/* SWITCH_OPTION_DEPRESS	*/
+	movl	$1,%edx			/* 1 ms				*/
+	movl	$(SYSCALL_CONSTRUCT_MACH(61)),%eax	/* 61 = thread_switch */
+	syscall
+	jmp	0b
+
+	COMMPAGE_DESCRIPTOR(spin_lock_mp_64,_COMM_PAGE_SPINLOCK_LOCK,0,kUP)
+
+
+	.align	4, 0x90
+Lspin_unlock_64:
+	movl	$0, (%rdi)
+	ret
+
+	COMMPAGE_DESCRIPTOR(spin_unlock_64,_COMM_PAGE_SPINLOCK_UNLOCK,0,0)
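
Usage note (not part of the diff above): a minimal C sketch of how user code normally exercises these commpage spinlock entry points, assuming that on Mac OS X of this era libSystem's OSSpinLock* functions from <libkern/OSAtomic.h> dispatch into _COMM_PAGE_SPINLOCK_TRY/_LOCK/_UNLOCK. That dispatch path is an assumption here; what is certain is that the lock convention matches the assembly (0 = unlocked, -1 = held, with a depressed-priority thread_switch when the lock stays contended).

/*
 * Minimal sketch, assuming the OSSpinLock API is backed by the commpage
 * routines shown in the diff (0 == unlocked, -1 == held).
 */
#include <libkern/OSAtomic.h>
#include <stdio.h>

static OSSpinLock lock = OS_SPINLOCK_INIT;	/* initialized to 0, i.e. unlocked */

int main(void)
{
	if (OSSpinLockTry(&lock)) {		/* non-blocking attempt, like the spin_lock_try path */
		puts("acquired without spinning");
		OSSpinLockUnlock(&lock);	/* release is a plain store of 0, as in Lspin_unlock */
	}

	OSSpinLockLock(&lock);			/* spins, then yields via a depressed-priority thread_switch */
	/* ... critical section ... */
	OSSpinLockUnlock(&lock);
	return 0;
}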