/*
 * Copyright (c) 2000 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * The contents of this file constitute Original Code as defined in and
 * are subject to the Apple Public Source License Version 1.1 (the
 * "License"). You may not use this file except in compliance with the
 * License. Please obtain a copy of the License at
 * http://www.apple.com/publicsource and read it before using this file.
 *
 * This Original Code and all software distributed under the License are
 * distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT. Please see the
 * License for the specific language governing rights and limitations
 * under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
/*
 * @OSF_COPYRIGHT@
 */
/*
 * Mach Operating System
 * Copyright (c) 1989 Carnegie-Mellon University
 * All rights reserved.  The CMU software License Agreement specifies
 * the terms and conditions for use and redistribution.
 */

#include <cpus.h>
#include <mach_rt.h>
#include <platforms.h>
#include <mach_ldebug.h>
#include <i386/asm.h>
#include <kern/etap_options.h>

#include "assym.s"

/*
 *	When performance isn't the only concern, it's
 *	nice to build stack frames...
 */
#define	BUILD_STACK_FRAMES	((MACH_LDEBUG || ETAP_LOCK_TRACE) && MACH_KDB)

#if	BUILD_STACK_FRAMES

#define	L_PC		4(%ebp)
#define	L_ARG0		8(%ebp)
#define	L_ARG1		12(%ebp)

#define	SWT_HI		-4(%ebp)
#define	SWT_LO		-8(%ebp)
#define	MISSED		-12(%ebp)

#else	/* BUILD_STACK_FRAMES */

#undef	FRAME
#undef	EMARF
#define	FRAME
#define	EMARF
#define	L_PC		(%esp)
#define	L_ARG0		4(%esp)
#define	L_ARG1		8(%esp)

#endif	/* BUILD_STACK_FRAMES */


#define	M_ILK		(%edx)
#define	M_LOCKED	1(%edx)
#define	M_WAITERS	2(%edx)
#define	M_PROMOTED_PRI	4(%edx)
#if	MACH_LDEBUG
#define	M_TYPE		6(%edx)
#define	M_PC		10(%edx)
#define	M_THREAD	14(%edx)
#endif	/* MACH_LDEBUG */

#include <i386/AT386/mp/mp.h>
#if	(NCPUS > 1)
#define	CX(addr,reg)	addr(,reg,4)
#else
#define	CPU_NUMBER(reg)
#define	CX(addr,reg)	addr
#endif	/* (NCPUS > 1) */

#if	MACH_LDEBUG
/*
 *  Routines for general lock debugging.
 */
#define	S_TYPE		4(%edx)
#define	S_PC		8(%edx)
#define	S_THREAD	12(%edx)
#define	S_DURATIONH	16(%edx)
#define	S_DURATIONL	20(%edx)

/*
 * Checks for expected lock types and calls "panic" on
 * mismatch.  Detects calls to Mutex functions with
 * type simplelock and vice versa.
 */
#define	CHECK_MUTEX_TYPE()				\
	cmpl	$ MUTEX_TAG,M_TYPE		; \
	je	1f				; \
	pushl	$2f				; \
	call	EXT(panic)			; \
	hlt					; \
	.data					; \
2:	String	"not a mutex!"			; \
	.text					; \
1:

#define	CHECK_SIMPLE_LOCK_TYPE()			\
	cmpl	$ SIMPLE_LOCK_TAG,S_TYPE	; \
	je	1f				; \
	pushl	$2f				; \
	call	EXT(panic)			; \
	hlt					; \
	.data					; \
2:	String	"not a simple lock!"		; \
	.text					; \
1:

/*
 * If one or more simplelocks are currently held by a thread,
 * an attempt to acquire a mutex will cause this check to fail
 * (since a mutex lock may context switch, holding a simplelock
 * is not a good thing).
 */
#if	0 /*MACH_RT - 11/12/99 - lion@apple.com disable check for now*/
#define	CHECK_PREEMPTION_LEVEL()			\
	movl	$ CPD_PREEMPTION_LEVEL,%eax	; \
	cmpl	$0,%gs:(%eax)			; \
	je	1f				; \
	pushl	$2f				; \
	call	EXT(panic)			; \
	hlt					; \
	.data					; \
2:	String	"preemption_level != 0!"	; \
	.text					; \
1:
#else	/* MACH_RT */
#define	CHECK_PREEMPTION_LEVEL()
#endif	/* MACH_RT */

#define	CHECK_NO_SIMPLELOCKS()				\
	movl	$ CPD_SIMPLE_LOCK_COUNT,%eax	; \
	cmpl	$0,%gs:(%eax)			; \
	je	1f				; \
	pushl	$2f				; \
	call	EXT(panic)			; \
	hlt					; \
	.data					; \
2:	String	"simple_locks_held!"		; \
	.text					; \
1:

/*
 * Verifies return to the correct thread in "unlock" situations.
 */
#define	CHECK_THREAD(thd)				\
	movl	$ CPD_ACTIVE_THREAD,%eax	; \
	movl	%gs:(%eax),%ecx			; \
	testl	%ecx,%ecx			; \
	je	1f				; \
	cmpl	%ecx,thd			; \
	je	1f				; \
	pushl	$2f				; \
	call	EXT(panic)			; \
	hlt					; \
	.data					; \
2:	String	"wrong thread!"			; \
	.text					; \
1:

#define	CHECK_MYLOCK(thd)				\
	movl	$ CPD_ACTIVE_THREAD,%eax	; \
	movl	%gs:(%eax),%ecx			; \
	testl	%ecx,%ecx			; \
	je	1f				; \
	cmpl	%ecx,thd			; \
	jne	1f				; \
	pushl	$2f				; \
	call	EXT(panic)			; \
	hlt					; \
	.data					; \
2:	String	"mylock attempt!"		; \
	.text					; \
1:

#define	METER_SIMPLE_LOCK_LOCK(reg)			\
	pushl	reg				; \
	call	EXT(meter_simple_lock)		; \
	popl	reg

#define	METER_SIMPLE_LOCK_UNLOCK(reg)			\
	pushl	reg				; \
	call	EXT(meter_simple_unlock)	; \
	popl	reg

#else	/* MACH_LDEBUG */
#define	CHECK_MUTEX_TYPE()
#define	CHECK_SIMPLE_LOCK_TYPE()
#define	CHECK_THREAD(thd)
#define	CHECK_PREEMPTION_LEVEL()
#define	CHECK_NO_SIMPLELOCKS()
#define	CHECK_MYLOCK(thd)
#define	METER_SIMPLE_LOCK_LOCK(reg)
#define	METER_SIMPLE_LOCK_UNLOCK(reg)
#endif	/* MACH_LDEBUG */


/*
 *	void hw_lock_init(hw_lock_t)
 *
 *	Initialize a hardware lock.
 */
ENTRY(hw_lock_init)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */
	xorl	%eax,%eax
	movb	%al,0(%edx)		/* clear the lock */
	EMARF
	ret

/*
 *	void hw_lock_lock(hw_lock_t)
 *	unsigned int hw_lock_to(hw_lock_t, unsigned int)
 *
 *	Acquire lock, spinning until it becomes available.
 *	XXX: For now, we don't actually implement the timeout.
 *	MACH_RT: also return with preemption disabled.
 */
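/*
 *	Illustrative sketch only (not assembled): the acquire path below
 *	is roughly the following C, assuming the lock is a single byte as
 *	the 0(%edx) accesses imply.  disable_preemption()/enable_preemption()
 *	and atomic_exchange_byte() are placeholder names for the macros and
 *	the xchgb instruction used in the assembly; they are not real calls.
 *
 *	void hw_lock_lock(volatile unsigned char *lock)
 *	{
 *		for (;;) {
 *			disable_preemption();
 *			if (atomic_exchange_byte(lock, 1) == 0)
 *				return;			// acquired; stay non-preemptible
 *			enable_preemption();
 *			while (*lock != 0)
 *				continue;		// spin on the cached value
 *		}
 *	}
 */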
ENTRY2(hw_lock_lock,hw_lock_to)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

1:	DISABLE_PREEMPTION(%eax)
	movb	$1,%cl
	xchgb	0(%edx),%cl		/* try to acquire the HW lock */
	testb	%cl,%cl			/* success? */
	jne	3f
	movl	$1,%eax			/* In case this was a timeout call */
	EMARF				/* if yes, then nothing left to do */
	ret

3:	ENABLE_PREEMPTION(%eax)		/* no reason we can't be preemptable now */

	movb	$1,%cl
2:	testb	%cl,0(%edx)		/* spin checking lock value in cache */
	jne	2b			/* non-zero means locked, keep spinning */
	jmp	1b			/* zero means unlocked, try to grab it */

/*
 *	void hw_lock_unlock(hw_lock_t)
 *
 *	Unconditionally release lock.
 *	MACH_RT: release preemption level.
 */
ENTRY(hw_lock_unlock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */
	xorl	%eax,%eax
	xchgb	0(%edx),%al		/* clear the lock... a mov instruction */
					/* ...might be cheaper and less paranoid */
	ENABLE_PREEMPTION(%eax)
	EMARF
	ret

/*
 *	unsigned int hw_lock_try(hw_lock_t)
 *	MACH_RT: returns with preemption disabled on success.
 */
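/*
 *	Sketch only (same placeholder names as the hw_lock_lock sketch
 *	above): a single non-blocking attempt.
 *
 *	unsigned int hw_lock_try(volatile unsigned char *lock)
 *	{
 *		disable_preemption();
 *		if (atomic_exchange_byte(lock, 1) == 0)
 *			return 1;		// acquired; stay non-preemptible
 *		enable_preemption();
 *		return 0;
 *	}
 */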
ENTRY(hw_lock_try)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	DISABLE_PREEMPTION(%eax)
	movb	$1,%cl
	xchgb	0(%edx),%cl		/* try to acquire the HW lock */
	testb	%cl,%cl			/* success? */
	jne	1f			/* if yes, let the caller know */

	movl	$1,%eax			/* success */
	EMARF
	ret

1:	ENABLE_PREEMPTION(%eax)		/* failure:  release preemption... */
	xorl	%eax,%eax		/* ...and return failure */
	EMARF
	ret

/*
 *	unsigned int hw_lock_held(hw_lock_t)
 *	MACH_RT: doesn't change preemption state.
 *	N.B.  Racy, of course.
 */
ENTRY(hw_lock_held)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	movb	$1,%cl
	testb	%cl,0(%edx)		/* check lock value */
	jne	1f			/* non-zero means locked */
	xorl	%eax,%eax		/* tell caller:  lock wasn't locked */
	EMARF
	ret

1:	movl	$1,%eax			/* tell caller:  lock was locked */
	EMARF
	ret



#if	0


ENTRY(_usimple_lock_init)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */
	xorl	%eax,%eax
	movb	%al,USL_INTERLOCK(%edx)	/* unlock the HW lock */
	EMARF
	ret

ENTRY(_simple_lock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_SIMPLE_LOCK_TYPE()

	DISABLE_PREEMPTION(%eax)

sl_get_hw:
	movb	$1,%cl
	xchgb	USL_INTERLOCK(%edx),%cl	/* try to acquire the HW lock */
	testb	%cl,%cl			/* did we succeed? */

#if	MACH_LDEBUG
	je	5f
	CHECK_MYLOCK(S_THREAD)
	jmp	sl_get_hw
5:
#else	/* MACH_LDEBUG */
	jne	sl_get_hw		/* no, try again */
#endif	/* MACH_LDEBUG */

#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,S_PC
	movl	$ CPD_ACTIVE_THREAD,%eax
	movl	%gs:(%eax),%ecx
	movl	%ecx,S_THREAD
	incl	CX(EXT(simple_lock_count),%eax)
#if 0
	METER_SIMPLE_LOCK_LOCK(%edx)
#endif
#if	NCPUS == 1
	pushf
	pushl	%edx
	cli
	call	EXT(lock_stack_push)
	popl	%edx
	popfl
#endif	/* NCPUS == 1 */
#endif	/* MACH_LDEBUG */

	EMARF
	ret

ENTRY(_simple_lock_try)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_SIMPLE_LOCK_TYPE()

	DISABLE_PREEMPTION(%eax)

	movb	$1,%cl
	xchgb	USL_INTERLOCK(%edx),%cl	/* try to acquire the HW lock */
	testb	%cl,%cl			/* did we succeed? */
	jne	1f			/* no, return failure */

#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,S_PC
	movl	$ CPD_ACTIVE_THREAD,%eax
	movl	%gs:(%eax),%ecx
	movl	%ecx,S_THREAD
	incl	CX(EXT(simple_lock_count),%eax)
#if 0
	METER_SIMPLE_LOCK_LOCK(%edx)
#endif
#if	NCPUS == 1
	pushf
	pushl	%edx
	cli
	call	EXT(lock_stack_push)
	popl	%edx
	popfl
#endif	/* NCPUS == 1 */
#endif	/* MACH_LDEBUG */

	movl	$1,%eax			/* return success */

	EMARF
	ret

1:
	ENABLE_PREEMPTION(%eax)

	xorl	%eax,%eax		/* and return failure */

	EMARF
	ret

ENTRY(_simple_unlock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_SIMPLE_LOCK_TYPE()
	CHECK_THREAD(S_THREAD)

#if	MACH_LDEBUG
	xorl	%eax,%eax
	movl	%eax,S_THREAD		/* disown thread */
	MP_DISABLE_PREEMPTION(%eax)
	CPU_NUMBER(%eax)
	decl	CX(EXT(simple_lock_count),%eax)
	MP_ENABLE_PREEMPTION(%eax)
#if 0
	METER_SIMPLE_LOCK_UNLOCK(%edx)
#endif
#if	NCPUS == 1
	pushf
	pushl	%edx
	cli
	call	EXT(lock_stack_pop)
	popl	%edx
	popfl
#endif	/* NCPUS == 1 */
#endif	/* MACH_LDEBUG */

	xorb	%cl,%cl
	xchgb	USL_INTERLOCK(%edx),%cl	/* unlock the HW lock */

	ENABLE_PREEMPTION(%eax)

	EMARF
	ret

#endif	/* 0 */


ENTRY(mutex_init)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */
	xorl	%eax,%eax
	movb	%al,M_ILK		/* clear interlock */
	movb	%al,M_LOCKED		/* clear locked flag */
	movw	%ax,M_WAITERS		/* init waiter count */
	movw	%ax,M_PROMOTED_PRI

#if	MACH_LDEBUG
	movl	$ MUTEX_TAG,M_TYPE	/* set lock type */
	movl	%eax,M_PC		/* init caller pc */
	movl	%eax,M_THREAD		/* and owning thread */
#endif
#if	ETAP_LOCK_TRACE
	movl	L_ARG1,%ecx		/* fetch event type */
	pushl	%ecx			/* push event type */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_init)	/* init ETAP data */
	addl	$8,%esp
#endif	/* ETAP_LOCK_TRACE */

	EMARF
	ret

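/*
 *	The mutex routines below share a two-level pattern: the one-byte
 *	interlock M_ILK is taken with xchgb to protect the mutex fields,
 *	the M_LOCKED byte records whether the mutex is held, and waiters
 *	block in mutex_lock_wait() until mutex_unlock_wakeup() runs.
 *	Roughly, in C (illustrative sketch only; the field and helper
 *	names are placeholders for the offsets and externals used here):
 *
 *	void mutex_lock(mutex_t *m)
 *	{
 *		for (;;) {
 *			disable_preemption();
 *			while (atomic_exchange_byte(&m->ilk, 1) != 0)
 *				continue;		// spin for the interlock
 *			if (atomic_exchange_byte(&m->locked, 1) == 0)
 *				break;			// we now own the mutex
 *			mutex_lock_wait(m, 0);		// blocks; see ml_block below
 *		}
 *		mutex_lock_acquire(m);		// waiter/priority bookkeeping
 *		m->ilk = 0;			// release interlock
 *		enable_preemption();
 *	}
 */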
ENTRY2(mutex_lock,_mutex_lock)
	FRAME

#if	ETAP_LOCK_TRACE
	subl	$12,%esp		/* make room for locals */
	movl	$0,SWT_HI		/* set wait time to zero (HI) */
	movl	$0,SWT_LO		/* set wait time to zero (LO) */
	movl	$0,MISSED		/* clear local miss marker */
#endif	/* ETAP_LOCK_TRACE */

	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_MUTEX_TYPE()
	CHECK_NO_SIMPLELOCKS()
	CHECK_PREEMPTION_LEVEL()

ml_retry:
	DISABLE_PREEMPTION(%eax)

ml_get_hw:
	movb	$1,%cl
	xchgb	%cl,M_ILK
	testb	%cl,%cl			/* did we succeed? */
	jne	ml_get_hw		/* no, try again */

	movb	$1,%cl
	xchgb	%cl,M_LOCKED		/* try to set locked flag */
	testb	%cl,%cl			/* is the mutex locked? */
	jne	ml_fail			/* yes, we lose */

	pushl	%edx
	call	EXT(mutex_lock_acquire)
	addl	$4,%esp
	movl	L_ARG0,%edx

#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,M_PC
	movl	$ CPD_ACTIVE_THREAD,%eax
	movl	%gs:(%eax),%ecx
	movl	%ecx,M_THREAD
	testl	%ecx,%ecx
	je	3f
	incl	TH_MUTEX_COUNT(%ecx)
3:
#endif

	xorb	%cl,%cl
	xchgb	%cl,M_ILK

	ENABLE_PREEMPTION(%eax)

#if	ETAP_LOCK_TRACE
	movl	L_PC,%eax		/* fetch pc */
	pushl	SWT_LO			/* push wait time (low) */
	pushl	SWT_HI			/* push wait time (high) */
	pushl	%eax			/* push pc */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_hold)	/* collect hold timestamp */
	addl	$16+12,%esp		/* clean up stack, adjusting for locals */
#endif	/* ETAP_LOCK_TRACE */

	EMARF
	ret

ml_fail:
#if	ETAP_LOCK_TRACE
	cmp	$0,MISSED		/* did we already take a wait timestamp? */
	jne	ml_block		/* yup. carry-on */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_miss)	/* get wait timestamp */
	movl	%eax,SWT_HI		/* set wait time (high word) */
	movl	%edx,SWT_LO		/* set wait time (low word) */
	popl	%edx			/* clean up stack */
	movl	$1,MISSED		/* mark wait timestamp as taken */
#endif	/* ETAP_LOCK_TRACE */

ml_block:
	CHECK_MYLOCK(M_THREAD)
	xorl	%eax,%eax
	pushl	%eax			/* no promotion here yet */
	pushl	%edx			/* push mutex address */
	call	EXT(mutex_lock_wait)	/* wait for the lock */
	addl	$8,%esp
	movl	L_ARG0,%edx		/* refetch lock pointer */
	jmp	ml_retry		/* and try again */

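/*
 *	mutex_try follows the same interlock/locked-byte sequence as
 *	mutex_lock above, but returns 0 instead of blocking when the
 *	locked byte is already set.  Sketch only (same placeholder names):
 *
 *	int mutex_try(mutex_t *m)
 *	{
 *		int got;
 *		disable_preemption();
 *		while (atomic_exchange_byte(&m->ilk, 1) != 0)
 *			continue;			// spin for the interlock
 *		got = (atomic_exchange_byte(&m->locked, 1) == 0);
 *		if (got)
 *			mutex_lock_acquire(m);		// waiter/priority bookkeeping
 *		m->ilk = 0;				// release interlock
 *		enable_preemption();
 *		return got;
 *	}
 */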
ENTRY2(mutex_try,_mutex_try)
	FRAME

#if	ETAP_LOCK_TRACE
	subl	$8,%esp			/* make room for locals */
	movl	$0,SWT_HI		/* set wait time to zero (HI) */
	movl	$0,SWT_LO		/* set wait time to zero (LO) */
#endif	/* ETAP_LOCK_TRACE */

	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_MUTEX_TYPE()
	CHECK_NO_SIMPLELOCKS()

	DISABLE_PREEMPTION(%eax)

mt_get_hw:
	movb	$1,%cl
	xchgb	%cl,M_ILK
	testb	%cl,%cl
	jne	mt_get_hw

	movb	$1,%cl
	xchgb	%cl,M_LOCKED
	testb	%cl,%cl
	jne	mt_fail

	pushl	%edx
	call	EXT(mutex_lock_acquire)
	addl	$4,%esp
	movl	L_ARG0,%edx

#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,M_PC
	movl	$ CPD_ACTIVE_THREAD,%ecx
	movl	%gs:(%ecx),%ecx
	movl	%ecx,M_THREAD
	testl	%ecx,%ecx
	je	1f
	incl	TH_MUTEX_COUNT(%ecx)
1:
#endif

	xorb	%cl,%cl
	xchgb	%cl,M_ILK

	ENABLE_PREEMPTION(%eax)

#if	ETAP_LOCK_TRACE
	movl	L_PC,%eax		/* fetch pc */
	pushl	SWT_LO			/* push wait time (low) */
	pushl	SWT_HI			/* push wait time (high) */
	pushl	%eax			/* push pc */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_hold)	/* get start hold timestamp */
	addl	$16,%esp		/* clean up stack, adjusting for locals */
#endif	/* ETAP_LOCK_TRACE */

	movl	$1,%eax

#if	MACH_LDEBUG || ETAP_LOCK_TRACE
#if	ETAP_LOCK_TRACE
	addl	$8,%esp			/* pop stack claimed on entry */
#endif
#endif

	EMARF
	ret

mt_fail:
#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,M_PC
	movl	$ CPD_ACTIVE_THREAD,%ecx
	movl	%gs:(%ecx),%ecx
	movl	%ecx,M_THREAD
	testl	%ecx,%ecx
	je	1f
	incl	TH_MUTEX_COUNT(%ecx)
1:
#endif

	xorb	%cl,%cl
	xchgb	%cl,M_ILK

	ENABLE_PREEMPTION(%eax)

#if	ETAP_LOCK_TRACE
	movl	L_PC,%eax		/* fetch pc */
	pushl	SWT_LO			/* push wait time (low) */
	pushl	SWT_HI			/* push wait time (high) */
	pushl	%eax			/* push pc */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_hold)	/* get start hold timestamp */
	addl	$16,%esp		/* clean up stack, adjusting for locals */
#endif	/* ETAP_LOCK_TRACE */

	xorl	%eax,%eax

#if	MACH_LDEBUG || ETAP_LOCK_TRACE
#if	ETAP_LOCK_TRACE
	addl	$8,%esp			/* pop stack claimed on entry */
#endif
#endif

	EMARF
	ret

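/*
 *	mutex_unlock: take the interlock, wake a waiter first if the
 *	waiter count is non-zero, then clear the locked byte and drop
 *	the interlock.  Sketch only (same placeholder names as above):
 *
 *	void mutex_unlock(mutex_t *m)
 *	{
 *		disable_preemption();
 *		while (atomic_exchange_byte(&m->ilk, 1) != 0)
 *			continue;			// spin for the interlock
 *		if (m->waiters != 0)
 *			mutex_unlock_wakeup(m, 0);	// wake a blocked thread
 *		m->locked = 0;
 *		m->ilk = 0;				// release interlock
 *		enable_preemption();
 *	}
 */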
ENTRY(mutex_unlock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

#if	ETAP_LOCK_TRACE
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_unlock)	/* collect ETAP data */
	popl	%edx			/* restore mutex address */
#endif	/* ETAP_LOCK_TRACE */

	CHECK_MUTEX_TYPE()
	CHECK_THREAD(M_THREAD)

	DISABLE_PREEMPTION(%eax)

mu_get_hw:
	movb	$1,%cl
	xchgb	%cl,M_ILK
	testb	%cl,%cl			/* did we succeed? */
	jne	mu_get_hw		/* no, try again */

	cmpw	$0,M_WAITERS		/* are there any waiters? */
	jne	mu_wakeup		/* yes, more work to do */

mu_doit:
#if	MACH_LDEBUG
	xorl	%eax,%eax
	movl	%eax,M_THREAD		/* disown thread */
	movl	$ CPD_ACTIVE_THREAD,%eax
	movl	%gs:(%eax),%ecx
	testl	%ecx,%ecx
	je	0f
	decl	TH_MUTEX_COUNT(%ecx)
0:
#endif

	xorb	%cl,%cl
	xchgb	%cl,M_LOCKED		/* unlock the mutex */

	xorb	%cl,%cl
	xchgb	%cl,M_ILK

	ENABLE_PREEMPTION(%eax)

	EMARF
	ret

mu_wakeup:
	xorl	%eax,%eax
	pushl	%eax			/* no promotion here yet */
	pushl	%edx			/* push mutex address */
	call	EXT(mutex_unlock_wakeup)/* yes, wake a thread */
	addl	$8,%esp
	movl	L_ARG0,%edx		/* refetch lock pointer */
	jmp	mu_doit

ENTRY(interlock_unlock)
	FRAME
	movl	L_ARG0,%edx

	xorb	%cl,%cl
	xchgb	%cl,M_ILK

	ENABLE_PREEMPTION(%eax)

	EMARF
	ret


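/*
 *	The preemption entry points below adjust the per-cpu preemption
 *	level (CPD_PREEMPTION_LEVEL, reached through %gs); the MACH_ASSERT
 *	variants panic if the level is not positive on enable.  Roughly,
 *	in C (illustrative only; cpu_data() is a placeholder for the
 *	%gs-relative per-cpu access):
 *
 *	void _disable_preemption(void)
 *	{
 *		cpu_data()->preemption_level++;
 *	}
 *
 *	void _enable_preemption(void)
 *	{
 *		assert(cpu_data()->preemption_level > 0);
 *		cpu_data()->preemption_level--;	// _ENABLE_PREEMPTION may then
 *						// honor a pending preemption
 *	}
 */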
ENTRY(_disable_preemption)
#if	MACH_RT
	_DISABLE_PREEMPTION(%eax)
#endif	/* MACH_RT */
	ret

ENTRY(_enable_preemption)
#if	MACH_RT
#if	MACH_ASSERT
	movl	$ CPD_PREEMPTION_LEVEL,%eax
	cmpl	$0,%gs:(%eax)
	jg	1f
	pushl	%gs:(%eax)
	pushl	$2f
	call	EXT(panic)
	hlt
	.data
2:	String	"_enable_preemption: preemption_level(%d) < 0!"
	.text
1:
#endif	/* MACH_ASSERT */
	_ENABLE_PREEMPTION(%eax)
#endif	/* MACH_RT */
	ret

ENTRY(_enable_preemption_no_check)
#if	MACH_RT
#if	MACH_ASSERT
	movl	$ CPD_PREEMPTION_LEVEL,%eax
	cmpl	$0,%gs:(%eax)
	jg	1f
	pushl	$2f
	call	EXT(panic)
	hlt
	.data
2:	String	"_enable_preemption_no_check: preemption_level <= 0!"
	.text
1:
#endif	/* MACH_ASSERT */
	_ENABLE_PREEMPTION_NO_CHECK(%eax)
#endif	/* MACH_RT */
	ret


ENTRY(_mp_disable_preemption)
#if	MACH_RT && NCPUS > 1
	_DISABLE_PREEMPTION(%eax)
#endif	/* MACH_RT && NCPUS > 1*/
	ret

ENTRY(_mp_enable_preemption)
#if	MACH_RT && NCPUS > 1
#if	MACH_ASSERT
	movl	$ CPD_PREEMPTION_LEVEL,%eax
	cmpl	$0,%gs:(%eax)
	jg	1f
	pushl	%gs:(%eax)
	pushl	$2f
	call	EXT(panic)
	hlt
	.data
2:	String	"_mp_enable_preemption: preemption_level (%d) <= 0!"
	.text
1:
#endif	/* MACH_ASSERT */
	_ENABLE_PREEMPTION(%eax)
#endif	/* MACH_RT && NCPUS > 1 */
	ret

ENTRY(_mp_enable_preemption_no_check)
#if	MACH_RT && NCPUS > 1
#if	MACH_ASSERT
	movl	$ CPD_PREEMPTION_LEVEL,%eax
	cmpl	$0,%gs:(%eax)
	jg	1f
	pushl	$2f
	call	EXT(panic)
	hlt
	.data
2:	String	"_mp_enable_preemption_no_check: preemption_level <= 0!"
	.text
1:
#endif	/* MACH_ASSERT */
	_ENABLE_PREEMPTION_NO_CHECK(%eax)
#endif	/* MACH_RT && NCPUS > 1 */
	ret


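/*
 *	i_bit_set/i_bit_clear atomically set or clear bit S_ARG0 of the
 *	word at S_ARG1 using lock bts/btr.  Roughly, in C (illustrative
 *	only; GCC-style atomic builtins stand in for the lock prefix):
 *
 *	void i_bit_set(int bit, volatile unsigned int *word)
 *	{
 *		__sync_fetch_and_or(word, 1u << bit);	// lock bts
 *	}
 *
 *	void i_bit_clear(int bit, volatile unsigned int *word)
 *	{
 *		__sync_fetch_and_and(word, ~(1u << bit));	// lock btr
 *	}
 */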
ENTRY(i_bit_set)
	movl	S_ARG0,%edx
	movl	S_ARG1,%eax
	lock
	bts	%dl,(%eax)
	ret

ENTRY(i_bit_clear)
	movl	S_ARG0,%edx
	movl	S_ARG1,%eax
	lock
	btr	%dl,(%eax)
	ret

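/*
 *	bit_lock spins until it is the caller that flips the bit from 0
 *	to 1; bit_lock_try makes a single attempt (the assembly returns
 *	the non-NULL word address as its success value); bit_unlock
 *	clears the bit.  Sketch only, with the same builtin stand-ins
 *	as above:
 *
 *	void bit_lock(int bit, volatile unsigned int *word)
 *	{
 *		while (__sync_fetch_and_or(word, 1u << bit) & (1u << bit))
 *			continue;		// bit was already set, spin
 *	}
 *
 *	int bit_lock_try(int bit, volatile unsigned int *word)
 *	{
 *		return !(__sync_fetch_and_or(word, 1u << bit) & (1u << bit));
 *	}
 *
 *	void bit_unlock(int bit, volatile unsigned int *word)
 *	{
 *		__sync_fetch_and_and(word, ~(1u << bit));
 *	}
 */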
ENTRY(bit_lock)
	movl	S_ARG0,%ecx
	movl	S_ARG1,%eax
1:
	lock
	bts	%ecx,(%eax)
	jb	1b
	ret

ENTRY(bit_lock_try)
	movl	S_ARG0,%ecx
	movl	S_ARG1,%eax
	lock
	bts	%ecx,(%eax)
	jb	bit_lock_failed
	ret			/* %eax better not be null ! */
bit_lock_failed:
	xorl	%eax,%eax
	ret

ENTRY(bit_unlock)
	movl	S_ARG0,%ecx
	movl	S_ARG1,%eax
	lock
	btr	%ecx,(%eax)
	ret