/*
 * Copyright (c) 2003-2006 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * The contents of this file constitute Original Code as defined in and
 * are subject to the Apple Public Source License Version 1.1 (the
 * "License"). You may not use this file except in compliance with the
 * License. Please obtain a copy of the License at
 * http://www.apple.com/publicsource and read it before using this file.
 *
 * This Original Code and all software distributed under the License are
 * distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT. Please see the
 * License for the specific language governing rights and limitations
 * under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

#include <sys/appleapiopts.h>
#include <machine/cpu_capabilities.h>
#include <machine/commpage.h>
#include <mach/i386/syscall_sw.h>



/* number of times the MP lock variants spin on a busy lock before yielding */
#define MP_SPIN_TRIES	1024

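/*
 * Usage sketch (illustrative only, not a definition from this file): these
 * routines are copied into the commpage and reached through the fixed
 * addresses published in <machine/cpu_capabilities.h>.  A 32-bit caller
 * might invoke them roughly like this, with hypothetical pointer names:
 *
 *	typedef int  (*spin_try_t)(volatile int *);
 *	typedef void (*spin_fn_t)(volatile int *);
 *
 *	spin_try_t try_lock = (spin_try_t)_COMM_PAGE_SPINLOCK_TRY;
 *	spin_fn_t  lock     = (spin_fn_t)_COMM_PAGE_SPINLOCK_LOCK;
 *	spin_fn_t  unlock   = (spin_fn_t)_COMM_PAGE_SPINLOCK_UNLOCK;
 *
 *	if (!try_lock(&lock_word))	// returns 1 if the lock was acquired
 *		lock(&lock_word);	// otherwise block until it is
 *	...critical section...
 *	unlock(&lock_word);
 */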
	.text
	.align	4, 0x90

/* int spin_lock_try_up(int *lockp) -- uniprocessor try-lock, no lock prefix needed */
Lspin_lock_try_up:
	movl	4(%esp), %ecx		/* %ecx := pointer to the lock word */
	xorl	%eax, %eax		/* expected value: 0 (unlocked) */
	orl	$-1, %edx		/* new value: -1 (locked) */
	cmpxchgl %edx, (%ecx)		/* if (*lockp == 0) *lockp = -1 */
	setz	%dl			/* %dl := 1 on success, 0 on failure */
	movzbl	%dl, %eax		/* return it in %eax */
	ret

	COMMPAGE_DESCRIPTOR(spin_lock_try_up,_COMM_PAGE_SPINLOCK_TRY,kUP,0)

	.align	4, 0x90
/* int spin_lock_try_mp(int *lockp) -- multiprocessor try-lock, locked cmpxchg */
Lspin_lock_try_mp:
	movl	4(%esp), %ecx		/* %ecx := pointer to the lock word */
	xorl	%eax, %eax		/* expected value: 0 (unlocked) */
	orl	$-1, %edx		/* new value: -1 (locked) */
	lock
	cmpxchgl %edx, (%ecx)		/* atomic: if (*lockp == 0) *lockp = -1 */
	setz	%dl			/* %dl := 1 on success, 0 on failure */
	movzbl	%dl, %eax		/* return it in %eax */
	ret

	COMMPAGE_DESCRIPTOR(spin_lock_try_mp,_COMM_PAGE_SPINLOCK_TRY,0,kUP)

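/*
 * Roughly equivalent C for the two try-lock variants above (sketch only;
 * the UP and MP versions differ only in the lock prefix on cmpxchg):
 *
 *	int spin_lock_try(volatile int *lockp)
 *	{
 *		// attempt an atomic 0 -> -1 transition of the lock word
 *		return __sync_bool_compare_and_swap(lockp, 0, -1);
 *	}
 */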
	.align	4, 0x90
/* void spin_lock_up(int *lockp) -- uniprocessor lock: no point spinning, yield at once */
Lspin_lock_up:
	movl	4(%esp), %ecx		/* %ecx := pointer to the lock word */
	xorl	%eax, %eax		/* expected value: 0 (unlocked) */
	orl	$-1, %edx		/* new value: -1 (locked) */
	cmpxchgl %edx, (%ecx)		/* if (*lockp == 0) *lockp = -1 */
	jnz,pn	1f			/* predict not taken */
	ret
1:
	/* failed to get lock so relinquish the processor immediately on UP */
	pushl	$1			/* 1 ms */
	pushl	$1			/* SWITCH_OPTION_DEPRESS */
	pushl	$0			/* THREAD_NULL */
	pushl	$0			/* push dummy stack ret addr */
	movl	$-61,%eax		/* SYSCALL_THREAD_SWITCH */
	int	$(MACH_INT)
	addl	$16, %esp		/* pop the four arguments */
	jmp	Lspin_lock_up		/* try again after the depress expires */

	COMMPAGE_DESCRIPTOR(spin_lock_up,_COMM_PAGE_SPINLOCK_LOCK,kUP,0)

	.align	4, 0x90
/* void spin_lock_mp(int *lockp) -- multiprocessor lock: spin briefly, then yield */
Lspin_lock_mp:
	movl	4(%esp), %ecx		/* %ecx := pointer to the lock word */
	xorl	%eax, %eax		/* expected value: 0 (unlocked) */
0:
	orl	$-1, %edx		/* new value: -1 (locked) */
	lock
	cmpxchgl %edx, (%ecx)		/* atomic: if (*lockp == 0) *lockp = -1 */
	jnz,pn	1f			/* predict not taken */
	ret
1:
	xorl	%eax, %eax
	movl	$(MP_SPIN_TRIES), %edx
2:
	pause				/* be polite while watching the lock word */
	cmpl	%eax, (%ecx)
	jz,pt	0b			/* favor success and slow down spin loop */
	decl	%edx
	jnz,pn	2b			/* slow down spin loop with a mispredict */
	/* failed to get lock after spinning so relinquish */
	pushl	$1			/* 1 ms */
	pushl	$1			/* SWITCH_OPTION_DEPRESS */
	pushl	$0			/* THREAD_NULL */
	pushl	$0			/* push dummy stack ret addr */
	movl	$-61,%eax		/* SYSCALL_THREAD_SWITCH */
	int	$(MACH_INT)
	addl	$16, %esp		/* pop the four arguments */
	jmp	Lspin_lock_mp		/* try again after the depress expires */

	COMMPAGE_DESCRIPTOR(spin_lock_mp,_COMM_PAGE_SPINLOCK_LOCK,0,kUP)

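/*
 * Roughly equivalent C for the MP lock path above (sketch only; the real
 * yield is the thread_switch Mach trap with SWITCH_OPTION_DEPRESS for 1 ms):
 *
 *	void spin_lock(volatile int *lockp)
 *	{
 *		for (;;) {
 *			if (__sync_bool_compare_and_swap(lockp, 0, -1))
 *				return;				// got the lock
 *			for (int i = MP_SPIN_TRIES; i > 0 && *lockp != 0; i--)
 *				__asm__ volatile ("pause");	// polite spin
 *			if (*lockp != 0)			// still busy: yield briefly
 *				thread_switch(THREAD_NULL, SWITCH_OPTION_DEPRESS, 1);
 *		}
 *	}
 */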
	.align	4, 0x90
/* void spin_unlock(int *lockp) -- release the lock with an ordinary store */
Lspin_unlock:
	movl	4(%esp), %ecx		/* %ecx := pointer to the lock word */
	movl	$0, (%ecx)		/* clear it */
	ret

	COMMPAGE_DESCRIPTOR(spin_unlock,_COMM_PAGE_SPINLOCK_UNLOCK,0,0)

/* ============================ 64-bit versions follow ===================== */

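/*
 * In these 64-bit variants the lock pointer arrives in %rdi per the x86-64
 * calling convention (rather than on the stack), and on contention the kernel
 * is entered with the syscall instruction using SYSCALL_CONSTRUCT_MACH(61),
 * i.e. the thread_switch Mach trap, instead of int $(MACH_INT).
 */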
	.text
	.code64
	.align	4, 0x90

/* int spin_lock_try_up_64(int *lockp) -- uniprocessor try-lock, lock pointer in %rdi */
Lspin_lock_try_up_64:
	xorl	%eax, %eax		/* expected value: 0 (unlocked) */
	orl	$-1, %edx		/* new value: -1 (locked) */
	cmpxchgl %edx, (%rdi)		/* if (*lockp == 0) *lockp = -1 */
	setz	%dl			/* %dl := 1 on success, 0 on failure */
	movzbl	%dl, %eax		/* return it in %eax */
	ret

	COMMPAGE_DESCRIPTOR(spin_lock_try_up_64,_COMM_PAGE_SPINLOCK_TRY,kUP,0)

	.align	4, 0x90
/* int spin_lock_try_mp_64(int *lockp) -- multiprocessor try-lock, locked cmpxchg */
Lspin_lock_try_mp_64:
	xorl	%eax, %eax		/* expected value: 0 (unlocked) */
	orl	$-1, %edx		/* new value: -1 (locked) */
	lock
	cmpxchgl %edx, (%rdi)		/* atomic: if (*lockp == 0) *lockp = -1 */
	setz	%dl			/* %dl := 1 on success, 0 on failure */
	movzbl	%dl, %eax		/* return it in %eax */
	ret

	COMMPAGE_DESCRIPTOR(spin_lock_try_mp_64,_COMM_PAGE_SPINLOCK_TRY,0,kUP)

	.align	4, 0x90
/* void spin_lock_up_64(int *lockp) -- uniprocessor lock: yield immediately on contention */
Lspin_lock_up_64:
	movq	%rdi,%r8		/* save lock pointer across the syscall */
0:
	xorl	%eax, %eax		/* expected value: 0 (unlocked) */
	orl	$-1, %edx		/* new value: -1 (locked) */
	cmpxchgl %edx, (%r8)		/* if (*lockp == 0) *lockp = -1 */
	jnz,pn	1f			/* predict not taken */
	ret
1:
	/* failed to get lock so relinquish the processor immediately on UP */
	xorl	%edi,%edi		/* THREAD_NULL */
	movl	$1,%esi			/* SWITCH_OPTION_DEPRESS */
	movl	$1,%edx			/* 1 ms */
	movl	$(SYSCALL_CONSTRUCT_MACH(61)),%eax	/* 61 = thread_switch */
	syscall
	jmp	0b			/* try again after the depress expires */

	COMMPAGE_DESCRIPTOR(spin_lock_up_64,_COMM_PAGE_SPINLOCK_LOCK,kUP,0)


	.align	4, 0x90
/* void spin_lock_mp_64(int *lockp) -- multiprocessor lock: spin briefly, then yield */
Lspin_lock_mp_64:
	movq	%rdi,%r8		/* save lock pointer across the syscall */
0:
	xorl	%eax, %eax		/* expected value: 0 (unlocked) */
	orl	$-1, %edx		/* new value: -1 (locked) */
	lock
	cmpxchgl %edx, (%r8)		/* atomic: if (*lockp == 0) *lockp = -1 */
	jnz,pn	1f			/* predict not taken */
	ret
1:
	xorl	%eax, %eax
	movl	$(MP_SPIN_TRIES), %edx
2:					/* spin for awhile before relinquishing */
	pause
	cmpl	%eax, (%r8)
	jz	0b			/* lock looks free: retry the cmpxchg */
	decl	%edx
	jnz	2b
	/* failed to get lock after spinning so relinquish */
	xorl	%edi,%edi		/* THREAD_NULL */
	movl	$1,%esi			/* SWITCH_OPTION_DEPRESS */
	movl	$1,%edx			/* 1 ms */
	movl	$(SYSCALL_CONSTRUCT_MACH(61)),%eax	/* 61 = thread_switch */
	syscall
	jmp	0b			/* try again after the depress expires */

	COMMPAGE_DESCRIPTOR(spin_lock_mp_64,_COMM_PAGE_SPINLOCK_LOCK,0,kUP)

	.align	4, 0x90
/* void spin_unlock_64(int *lockp) -- release the lock with an ordinary store */
Lspin_unlock_64:
	movl	$0, (%rdi)		/* clear the lock word */
	ret

	COMMPAGE_DESCRIPTOR(spin_unlock_64,_COMM_PAGE_SPINLOCK_UNLOCK,0,0)