/*
 * Copyright (c) 2000 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * The contents of this file constitute Original Code as defined in and
 * are subject to the Apple Public Source License Version 1.1 (the
 * "License").  You may not use this file except in compliance with the
 * License.  Please obtain a copy of the License at
 * http://www.apple.com/publicsource and read it before using this file.
 *
 * This Original Code and all software distributed under the License are
 * distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT.  Please see the
 * License for the specific language governing rights and limitations
 * under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
/*
 * @OSF_COPYRIGHT@
 */
/*
 * Mach Operating System
 * Copyright (c) 1989 Carnegie-Mellon University
 * All rights reserved.  The CMU software License Agreement specifies
 * the terms and conditions for use and redistribution.
 */

#include <cpus.h>
#include <mach_rt.h>
#include <platforms.h>
#include <mach_ldebug.h>
#include <i386/asm.h>
#include <assym.s>
#include <kern/etap_options.h>


/*
 * When performance isn't the only concern, it's
 * nice to build stack frames...
 *
 * Frames are built only for lock-debugging or ETAP-tracing kernels
 * that also include the kernel debugger (MACH_KDB).
 */
#define	BUILD_STACK_FRAMES ((MACH_LDEBUG || ETAP_LOCK_TRACE) && MACH_KDB)

#if	BUILD_STACK_FRAMES

/* %ebp-relative accessors, valid after FRAME builds a real frame */
#define	L_PC		4(%ebp)		/* caller's return address */
#define	L_ARG0		8(%ebp)		/* first argument */
#define	L_ARG1		12(%ebp)	/* second argument */

/* ETAP locals; valid only after the "subl $...,%esp" on entry */
#define	SWT_HI		-4(%ebp)	/* wait time, high word */
#define	SWT_LO		-8(%ebp)	/* wait time, low word */
#define	MISSED		-12(%ebp)	/* "wait timestamp taken" flag */

#else	/* BUILD_STACK_FRAMES */

/* Frameless build: FRAME/EMARF are no-ops, args read off %esp */
#undef	FRAME
#undef	EMARF
#define	FRAME
#define	EMARF
#define	L_PC		(%esp)		/* return address at top of stack */
#define	L_ARG0		4(%esp)
#define	L_ARG1		8(%esp)

#endif	/* BUILD_STACK_FRAMES */


/*
 * Byte offsets of the fields of a mutex, addressed via %edx.
 * NOTE(review): assumes the ilk/locked/waiters bytes are packed at the
 * start of the C mutex structure -- confirm against the C definition.
 */
#define	M_ILK		(%edx)		/* hardware interlock byte */
#define	M_LOCKED	1(%edx)		/* mutex locked-flag byte */
#define	M_WAITERS	2(%edx)		/* 16-bit waiter count */
#if	MACH_LDEBUG
#define	M_TYPE		4(%edx)		/* lock-type tag */
#define	M_PC		8(%edx)		/* pc of last acquirer */
#define	M_THREAD	12(%edx)	/* owning thread */
#endif	/* MACH_LDEBUG */

#include <i386/AT386/mp/mp.h>
#if	(NCPUS > 1)
/* CX: index a per-CPU array of 32-bit entries by CPU number */
#define	CX(addr,reg)	addr(,reg,4)
#else
#define	CPU_NUMBER(reg)			/* uniprocessor: CPU number is 0 */
#define	CX(addr,reg)	addr		/* ...and the index is ignored */
#endif	/* (NCPUS > 1) */

#if	MACH_LDEBUG
/*
 * Routines for general lock debugging.
 */

/* Byte offsets of the debug fields of a simple lock, via %edx */
#define	S_TYPE		4(%edx)		/* lock-type tag */
#define	S_PC		8(%edx)		/* pc of last acquirer */
#define	S_THREAD	12(%edx)	/* owning thread */
#define	S_DURATIONH	16(%edx)
#define	S_DURATIONL	20(%edx)

/*
 * Checks for expected lock types and calls "panic" on
 * mismatch.  Detects calls to Mutex functions with
 * type simplelock and vice versa.
 * (Each macro uses local numeric labels 1/2; safe because numeric
 * labels may be redefined and are resolved to the nearest instance.)
 */
#define	CHECK_MUTEX_TYPE()			\
	cmpl	$MUTEX_TAG, M_TYPE	;	\
	je	1f			;	\
	pushl	$2f			;	\
	call	EXT(panic)		;	\
	hlt				;	\
	.data				;	\
2:	String	"not a mutex!"		;	\
	.text				;	\
1:

#define	CHECK_SIMPLE_LOCK_TYPE()		\
	cmpl	$SIMPLE_LOCK_TAG,S_TYPE	;	\
	je	1f			;	\
	pushl	$2f			;	\
	call	EXT(panic)		;	\
	hlt				;	\
	.data				;	\
2:	String	"not a simple lock!"	;	\
	.text				;	\
1:

/*
 * If one or more simplelocks are currently held by a thread,
 * an attempt to acquire a mutex will cause this check to fail
 * (since a mutex lock may context switch, holding a simplelock
 * is not a good thing).
 */
#if 0 /*MACH_RT - 11/12/99 - lion@apple.com disable check for now*/
#define	CHECK_PREEMPTION_LEVEL()		\
	movl	$CPD_PREEMPTION_LEVEL,%eax ;	\
	cmpl	$0,%gs:(%eax)		;	\
	je	1f			;	\
	pushl	$2f			;	\
	call	EXT(panic)		;	\
	hlt				;	\
	.data				;	\
2:	String	"preemption_level != 0!" ;	\
	.text				;	\
1:
#else	/* MACH_RT */
#define	CHECK_PREEMPTION_LEVEL()
#endif	/* MACH_RT */

/* Panic if the per-CPU simple-lock count is non-zero. */
#define	CHECK_NO_SIMPLELOCKS()			\
	movl	$CPD_SIMPLE_LOCK_COUNT,%eax ;	\
	cmpl	$0,%gs:(%eax)		;	\
	je	1f			;	\
	pushl	$2f			;	\
	call	EXT(panic)		;	\
	hlt				;	\
	.data				;	\
2:	String	"simple_locks_held!"	;	\
	.text				;	\
1:

/*
 * Verifies return to the correct thread in "unlock" situations.
 */
#define	CHECK_THREAD(thd)			\
	movl	$CPD_ACTIVE_THREAD,%eax	;	\
	movl	%gs:(%eax),%ecx		;	\
	testl	%ecx,%ecx		;	\
	je	1f			;	\
	cmpl	%ecx,thd		;	\
	je	1f			;	\
	pushl	$2f			;	\
	call	EXT(panic)		;	\
	hlt				;	\
	.data				;	\
2:	String	"wrong thread!"		;	\
	.text				;	\
1:

/* Panic if the current thread already owns the lock (self-deadlock). */
#define	CHECK_MYLOCK(thd)			\
	movl	$CPD_ACTIVE_THREAD,%eax	;	\
	movl	%gs:(%eax),%ecx		;	\
	testl	%ecx,%ecx		;	\
	je	1f			;	\
	cmpl	%ecx,thd		;	\
	jne	1f			;	\
	pushl	$2f			;	\
	call	EXT(panic)		;	\
	hlt				;	\
	.data				;	\
2:	String	"mylock attempt!"	;	\
	.text				;	\
1:

/* Call the C metering hooks, preserving the lock-pointer register. */
#define	METER_SIMPLE_LOCK_LOCK(reg)		\
	pushl	reg			;	\
	call	EXT(meter_simple_lock)	;	\
	popl	reg

#define	METER_SIMPLE_LOCK_UNLOCK(reg)		\
	pushl	reg			;	\
	call	EXT(meter_simple_unlock) ;	\
	popl	reg

#else	/* MACH_LDEBUG */
/*
 * Non-debug build: all checks and metering compile to nothing.
 *
 * NB: every stub must be a function-like macro (trailing "()"),
 * because the call sites invoke them as CHECK_...().  An object-like
 * stub would leave a stray "()" token in the assembly output.
 * (CHECK_SIMPLE_LOCK_TYPE was previously missing its "()".)
 */
#define	CHECK_MUTEX_TYPE()
#define	CHECK_SIMPLE_LOCK_TYPE()
#define	CHECK_THREAD(thd)
#define	CHECK_PREEMPTION_LEVEL()
#define	CHECK_NO_SIMPLELOCKS()
#define	CHECK_MYLOCK(thd)
#define	METER_SIMPLE_LOCK_LOCK(reg)
#define	METER_SIMPLE_LOCK_UNLOCK(reg)
#endif	/* MACH_LDEBUG */
211 | ||
212 | ||
/*
 * void hw_lock_init(hw_lock_t)
 *
 * Initialize a hardware lock: put it in the released state.
 */
ENTRY(hw_lock_init)
	FRAME
	movl	L_ARG0,%edx		/* %edx = lock pointer */
	movb	$0,0(%edx)		/* mark the lock free */
	EMARF
	ret

/*
 * void hw_lock_lock(hw_lock_t)
 *
 * Acquire lock, spinning until it becomes available.
 * MACH_RT: also return with preemption disabled.
 *
 * The acquire attempt uses xchgb (implicitly locked on x86) with
 * preemption disabled; while the lock is held we re-enable preemption
 * and spin on a plain read of the cached lock byte to avoid bus traffic.
 */
ENTRY(hw_lock_lock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

1:	DISABLE_PREEMPTION(%eax)
	movb	$1,%cl
	xchgb	0(%edx),%cl		/* try to acquire the HW lock */
	testb	%cl,%cl			/* success? (old value was 0) */
	jne	3f

	EMARF				/* if yes, then nothing left to do */
	ret

3:	ENABLE_PREEMPTION(%eax)		/* no reason we can't be preemptable now */

	movb	$1,%cl
2:	testb	%cl,0(%edx)		/* spin checking lock value in cache */
	jne	2b			/* non-zero means locked, keep spinning */
	jmp	1b			/* zero means unlocked, try to grab it */

/*
 * void hw_lock_unlock(hw_lock_t)
 *
 * Unconditionally release lock.
 * MACH_RT: release preemption level.
 */
ENTRY(hw_lock_unlock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */
	xorl	%eax,%eax
	xchgb	0(%edx),%al		/* clear the lock... a mov instruction */
					/* ...might be cheaper and less paranoid */
	ENABLE_PREEMPTION(%eax)
	EMARF
	ret

/*
 * unsigned int hw_lock_try(hw_lock_t)
 *
 * Single acquire attempt: returns 1 on success, 0 on failure.
 * MACH_RT: returns with preemption disabled on success.
 */
ENTRY(hw_lock_try)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	DISABLE_PREEMPTION(%eax)
	movb	$1,%cl
	xchgb	0(%edx),%cl		/* try to acquire the HW lock */
	testb	%cl,%cl			/* success? (old value was 0) */
	jne	1f			/* non-zero: lock was held, fail */

	movl	$1,%eax			/* success */
	EMARF
	ret

1:	ENABLE_PREEMPTION(%eax)		/* failure: release preemption... */
	xorl	%eax,%eax		/* ...and return failure */
	EMARF
	ret

/*
 * unsigned int hw_lock_held(hw_lock_t)
 *
 * Return 1 if the lock byte is non-zero (held), else 0.
 * MACH_RT: doesn't change preemption state.
 * N.B. Racy, of course.
 */
ENTRY(hw_lock_held)
	FRAME
	movl	L_ARG0,%edx		/* %edx = lock pointer */
	xorl	%eax,%eax		/* assume: not held */
	cmpb	$0,0(%edx)		/* inspect the lock byte */
	je	1f			/* clear -> report 0 */
	incl	%eax			/* set -> report 1 */
1:	EMARF
	ret



311 | ||
312 | ||
313 | #if 0 | |
314 | ||
315 | ||
/*
 * Debug (usimple) lock initialization: clear the interlock byte.
 * NOTE(review): dead code -- this region is compiled out under "#if 0".
 */
ENTRY(_usimple_lock_init)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */
	xorl	%eax,%eax
	movb	%al,USL_INTERLOCK(%edx)	/* unlock the HW lock */
	EMARF
	ret

/*
 * Debug variant of simple_lock: spin on the interlock byte with
 * preemption disabled, then record debugging state.
 * NOTE(review): dead code -- this region is compiled out under "#if 0".
 */
ENTRY(_simple_lock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_SIMPLE_LOCK_TYPE()

	DISABLE_PREEMPTION(%eax)

sl_get_hw:
	movb	$1,%cl
	xchgb	USL_INTERLOCK(%edx),%cl	/* try to acquire the HW lock */
	testb	%cl,%cl			/* did we succeed? */

#if	MACH_LDEBUG
	je	5f
	CHECK_MYLOCK(S_THREAD)		/* self-deadlock check before re-spin */
	jmp	sl_get_hw
5:
#else	/* MACH_LDEBUG */
	jne	sl_get_hw		/* no, try again */
#endif	/* MACH_LDEBUG */

#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,S_PC		/* record acquirer's pc */
	movl	$CPD_ACTIVE_THREAD,%eax
	movl	%gs:(%eax),%ecx
	movl	%ecx,S_THREAD		/* record owning thread */
	/*
	 * NOTE(review): %eax holds CPD_ACTIVE_THREAD here, not a CPU
	 * number; CX() ignores the index only when NCPUS == 1 -- confirm.
	 */
	incl	CX(EXT(simple_lock_count),%eax)
#if 0
	METER_SIMPLE_LOCK_LOCK(%edx)
#endif
#if	NCPUS == 1
	pushf				/* save IF, run stack ops with ints off */
	pushl	%edx
	cli
	call	EXT(lock_stack_push)
	popl	%edx
	popfl
#endif	/* NCPUS == 1 */
#endif	/* MACH_LDEBUG */

	EMARF
	ret

/*
 * Debug variant of simple_lock_try: one acquire attempt; returns
 * 1 on success (preemption stays disabled), 0 on failure.
 * NOTE(review): dead code -- this region is compiled out under "#if 0".
 */
ENTRY(_simple_lock_try)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_SIMPLE_LOCK_TYPE()

	DISABLE_PREEMPTION(%eax)

	movb	$1,%cl
	xchgb	USL_INTERLOCK(%edx),%cl	/* try to acquire the HW lock */
	testb	%cl,%cl			/* did we succeed? */
	jne	1f			/* no, return failure */

#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,S_PC		/* record acquirer's pc */
	movl	$CPD_ACTIVE_THREAD,%eax
	movl	%gs:(%eax),%ecx
	movl	%ecx,S_THREAD		/* record owning thread */
	/* NOTE(review): %eax is CPD_ACTIVE_THREAD, not a CPU number (see _simple_lock) */
	incl	CX(EXT(simple_lock_count),%eax)
#if 0
	METER_SIMPLE_LOCK_LOCK(%edx)
#endif
#if	NCPUS == 1
	pushf				/* save IF, run stack ops with ints off */
	pushl	%edx
	cli
	call	EXT(lock_stack_push)
	popl	%edx
	popfl
#endif	/* NCPUS == 1 */
#endif	/* MACH_LDEBUG */

	movl	$1,%eax			/* return success */

	EMARF
	ret

1:
	ENABLE_PREEMPTION(%eax)		/* failed: drop preemption disable */

	xorl	%eax,%eax		/* and return failure */

	EMARF
	ret

/*
 * Debug variant of simple_unlock: clear the debug state, release the
 * interlock byte, and re-enable preemption.
 * NOTE(review): dead code -- this region is compiled out under "#if 0".
 */
ENTRY(_simple_unlock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_SIMPLE_LOCK_TYPE()
	CHECK_THREAD(S_THREAD)		/* only the owner may unlock */

#if	MACH_LDEBUG
	xorl	%eax,%eax
	movl	%eax,S_THREAD		/* disown thread */
	MP_DISABLE_PREEMPTION(%eax)
	CPU_NUMBER(%eax)		/* %eax = this CPU's index */
	decl	CX(EXT(simple_lock_count),%eax)
	MP_ENABLE_PREEMPTION(%eax)
#if 0
	METER_SIMPLE_LOCK_UNLOCK(%edx)
#endif
#if	NCPUS == 1
	pushf				/* save IF, run stack ops with ints off */
	pushl	%edx
	cli
	call	EXT(lock_stack_pop)
	popl	%edx
	popfl
#endif	/* NCPUS == 1 */
#endif	/* MACH_LDEBUG */

	xorb	%cl,%cl
	xchgb	USL_INTERLOCK(%edx),%cl	/* unlock the HW lock */

	ENABLE_PREEMPTION(%eax)

	EMARF
	ret

450 | #endif /* 0 */ | |
451 | ||
452 | ||
/*
 * void mutex_init(mutex_t, event)
 *
 * Initialize a mutex: interlock and locked flag clear, no waiters.
 * L_ARG1 is the ETAP event type (only used with ETAP_LOCK_TRACE).
 */
ENTRY(mutex_init)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */
	xorl	%eax,%eax
	movb	%al,M_ILK		/* clear interlock */
	movb	%al,M_LOCKED		/* clear locked flag */
	movw	%ax,M_WAITERS		/* init waiter count */

#if	MACH_LDEBUG
	movl	$MUTEX_TAG,M_TYPE	/* set lock type */
	movl	%eax,M_PC		/* init caller pc */
	movl	%eax,M_THREAD		/* and owning thread */
#endif
#if	ETAP_LOCK_TRACE
	movl	L_ARG1,%ecx		/* fetch event type */
	pushl	%ecx			/* push event type */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_init)	/* init ETAP data */
	addl	$8,%esp
#endif	/* ETAP_LOCK_TRACE */

	EMARF
	ret

/*
 * void mutex_lock(mutex_t)
 *
 * Acquire a mutex, blocking in mutex_lock_wait() when it is already
 * owned.  Protocol: M_ILK is a hardware interlock taken by xchgb with
 * preemption disabled; M_LOCKED is the mutex state proper.
 */
ENTRY2(mutex_lock,_mutex_lock)
	FRAME

#if	ETAP_LOCK_TRACE
	subl	$12,%esp		/* make room for locals */
	movl	$0,SWT_HI		/* set wait time to zero (HI) */
	movl	$0,SWT_LO		/* set wait time to zero (LO) */
	movl	$0,MISSED		/* clear local miss marker */
#endif	/* ETAP_LOCK_TRACE */

	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_MUTEX_TYPE()
	CHECK_NO_SIMPLELOCKS()
	CHECK_PREEMPTION_LEVEL()

ml_retry:
	DISABLE_PREEMPTION(%eax)

ml_get_hw:
	movb	$1,%cl
	xchgb	%cl,M_ILK		/* grab the interlock */
	testb	%cl,%cl			/* did we succeed? */
	jne	ml_get_hw		/* no, try again */

	/*
	 * Beware of a race between this code path and the inline ASM
	 * fast-path locking sequence which attempts to lock a mutex by
	 * directly setting the locked flag.
	 */
	movb	$1,%cl
	xchgb	%cl,M_LOCKED		/* try to set locked flag */
	testb	%cl,%cl			/* is the mutex locked? */
	jne	ml_fail			/* yes, we lose */

#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,M_PC		/* record acquirer's pc */
	movl	$CPD_ACTIVE_THREAD,%eax
	movl	%gs:(%eax),%ecx
	movl	%ecx,M_THREAD		/* record owning thread */
	testl	%ecx,%ecx
	je	3f			/* no active thread yet (early boot?) */
	incl	TH_MUTEX_COUNT(%ecx)
3:
#endif

	xorb	%cl,%cl
	xchgb	%cl,M_ILK		/* release the interlock */

	ENABLE_PREEMPTION(%eax)

#if	ETAP_LOCK_TRACE
	movl	L_PC,%eax		/* fetch pc */
	pushl	SWT_LO			/* push wait time (low) */
	pushl	SWT_HI			/* push wait time (high) */
	pushl	%eax			/* push pc */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_hold)	/* collect hold timestamp */
	addl	$16+12,%esp		/* clean up stack, adjusting for locals */
#endif	/* ETAP_LOCK_TRACE */

	EMARF
	ret

ml_fail:
#if	ETAP_LOCK_TRACE
	cmp	$0,MISSED		/* did we already take a wait timestamp? */
	jne	ml_block		/* yup. carry-on */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_miss)	/* get wait timestamp */
	movl	%eax,SWT_HI		/* set wait time (high word) */
	movl	%edx,SWT_LO		/* set wait time (low word) */
	popl	%edx			/* clean up stack */
	movl	$1,MISSED		/* mark wait timestamp as taken */
#endif	/* ETAP_LOCK_TRACE */

ml_block:
	/*
	 * NOTE(review): the interlock is still held and preemption is
	 * still disabled here -- presumably mutex_lock_wait() releases
	 * both before blocking.  Confirm against its C implementation.
	 */
	CHECK_MYLOCK(M_THREAD)
	pushl	%edx			/* push mutex address */
	call	EXT(mutex_lock_wait)	/* wait for the lock */
	addl	$4,%esp
	movl	L_ARG0,%edx		/* refetch lock pointer */
	jmp	ml_retry		/* and try again */

/*
 * unsigned int mutex_try(mutex_t)
 *
 * One-shot acquire attempt: swaps the locked flag directly (without
 * taking the interlock -- this is the fast path the race comment in
 * mutex_lock refers to).  Returns 1 on success, 0 if already held.
 */
ENTRY2(mutex_try,_mutex_try)
	FRAME

#if	ETAP_LOCK_TRACE
	subl	$8,%esp			/* make room for locals */
	movl	$0,SWT_HI		/* set wait time to zero (HI) */
	movl	$0,SWT_LO		/* set wait time to zero (LO) */
#endif	/* ETAP_LOCK_TRACE */

	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_MUTEX_TYPE()
	CHECK_NO_SIMPLELOCKS()

	xorl	%eax,%eax
	movb	$1,%al			/* locked value for mutex */
	xchgb	%al,M_LOCKED		/* swap locked values */
	xorb	$1,%al			/* generate return value (old 0 -> 1) */

#if	MACH_LDEBUG || ETAP_LOCK_TRACE
	testl	%eax,%eax		/* did we succeed? */
	je	2f			/* no, skip */
#endif

#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,M_PC		/* record acquirer's pc */
	movl	$CPD_ACTIVE_THREAD,%ecx
	movl	%gs:(%ecx),%ecx
	movl	%ecx,M_THREAD		/* record owning thread */
	testl	%ecx,%ecx
	je	1f			/* no active thread yet */
	incl	TH_MUTEX_COUNT(%ecx)
1:
#endif

#if	ETAP_LOCK_TRACE
	movl	L_PC,%eax		/* fetch pc */
	pushl	SWT_LO			/* push wait time (low) */
	pushl	SWT_HI			/* push wait time (high) */
	pushl	%eax			/* push pc */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_hold)	/* get start hold timestamp */
	addl	$16,%esp		/* clean up stack, adjusting for locals */
	movl	$1,%eax			/* put back successful return value */
#endif	/* ETAP_LOCK_TRACE */

#if	MACH_LDEBUG || ETAP_LOCK_TRACE
2:
#if	ETAP_LOCK_TRACE
	addl	$8,%esp			/* pop stack claimed on entry */
#endif
#endif

	EMARF
	ret

/*
 * void mutex_unlock(mutex_t)
 *
 * Release a mutex, waking a waiter if one is queued.  The waiter
 * count is examined under the interlock.
 */
ENTRY(mutex_unlock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

#if	ETAP_LOCK_TRACE
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_unlock)	/* collect ETAP data */
	popl	%edx			/* restore mutex address */
#endif	/* ETAP_LOCK_TRACE */

	CHECK_MUTEX_TYPE()
	CHECK_THREAD(M_THREAD)		/* only the owner may unlock */

	DISABLE_PREEMPTION(%eax)

mu_get_hw:
	movb	$1,%cl
	xchgb	%cl,M_ILK		/* grab the interlock */
	testb	%cl,%cl			/* did we succeed? */
	jne	mu_get_hw		/* no, try again */

	cmpw	$0,M_WAITERS		/* are there any waiters? */
	jne	mu_wakeup		/* yes, more work to do */

mu_doit:
#if	MACH_LDEBUG
	xorl	%eax,%eax
	movl	%eax,M_THREAD		/* disown thread */
	movl	$CPD_ACTIVE_THREAD,%eax
	movl	%gs:(%eax),%ecx
	testl	%ecx,%ecx
	je	0f			/* no active thread yet */
	decl	TH_MUTEX_COUNT(%ecx)
0:
#endif

	xorb	%cl,%cl
	xchgb	%cl,M_LOCKED		/* unlock the mutex */

	xorb	%cl,%cl
	xchgb	%cl,M_ILK		/* release the interlock */

	ENABLE_PREEMPTION(%eax)

	EMARF
	ret

mu_wakeup:
	/*
	 * NOTE(review): the interlock is held across this call and is
	 * released at mu_doit -- presumably mutex_unlock_wakeup expects
	 * that.  Confirm against its C implementation.
	 */
	pushl	%edx			/* push mutex address */
	call	EXT(mutex_unlock_wakeup)/* yes, wake a thread */
	addl	$4,%esp
	movl	L_ARG0,%edx		/* refetch lock pointer */
	jmp	mu_doit

/*
 * void interlock_unlock(mutex_t)
 *
 * Release a mutex interlock and re-enable preemption.
 * NOTE(review): assumes the caller disabled preemption when it took
 * the interlock -- confirm against the inline fast-path sequence.
 */
ENTRY(interlock_unlock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	xorb	%cl,%cl
	xchgb	%cl,M_ILK		/* clear the interlock byte */

	ENABLE_PREEMPTION(%eax)

	EMARF
	ret


686 | ||
/*
 * void _disable_preemption(void)
 *
 * Disable preemption on this CPU (no-op on non-MACH_RT builds).
 */
ENTRY(_disable_preemption)
#if	MACH_RT
	_DISABLE_PREEMPTION(%eax)
#endif	/* MACH_RT */
	ret

/*
 * void _enable_preemption(void)
 *
 * Re-enable preemption on this CPU.  Under MACH_ASSERT, panic when
 * the per-CPU preemption level is not positive on entry.
 */
ENTRY(_enable_preemption)
#if	MACH_RT
#if	MACH_ASSERT
	movl	$CPD_PREEMPTION_LEVEL,%eax
	cmpl	$0,%gs:(%eax)		/* per-CPU preemption level */
	jg	1f			/* positive: OK to drop it */
	pushl	%gs:(%eax)		/* panic arg: the bad level */
	pushl	$2f
	call	EXT(panic)
	hlt
	.data
2:	String	"_enable_preemption: preemption_level(%d) < 0!"
	.text
1:
#endif	/* MACH_ASSERT */
	_ENABLE_PREEMPTION(%eax)
#endif	/* MACH_RT */
	ret

/*
 * void _enable_preemption_no_check(void)
 *
 * Re-enable preemption via the _NO_CHECK macro variant.  Under
 * MACH_ASSERT, panic when the preemption level is not positive.
 */
ENTRY(_enable_preemption_no_check)
#if	MACH_RT
#if	MACH_ASSERT
	movl	$CPD_PREEMPTION_LEVEL,%eax
	cmpl	$0,%gs:(%eax)		/* per-CPU preemption level */
	jg	1f			/* positive: OK to drop it */
	pushl	$2f
	call	EXT(panic)
	hlt
	.data
2:	String	"_enable_preemption_no_check: preemption_level <= 0!"
	.text
1:
#endif	/* MACH_ASSERT */
	_ENABLE_PREEMPTION_NO_CHECK(%eax)
#endif	/* MACH_RT */
	ret

730 | ||
/*
 * void _mp_disable_preemption(void)
 *
 * MP-only preemption disable; compiles away on UP or non-MACH_RT.
 */
ENTRY(_mp_disable_preemption)
#if	MACH_RT && NCPUS > 1
	_DISABLE_PREEMPTION(%eax)
#endif	/* MACH_RT && NCPUS > 1*/
	ret

/*
 * void _mp_enable_preemption(void)
 *
 * MP-only preemption enable; compiles away on UP or non-MACH_RT.
 * Under MACH_ASSERT, panic when the level is not positive on entry.
 */
ENTRY(_mp_enable_preemption)
#if	MACH_RT && NCPUS > 1
#if	MACH_ASSERT
	movl	$CPD_PREEMPTION_LEVEL,%eax
	cmpl	$0,%gs:(%eax)		/* per-CPU preemption level */
	jg	1f			/* positive: OK to drop it */
	pushl	%gs:(%eax)		/* panic arg: the bad level */
	pushl	$2f
	call	EXT(panic)
	hlt
	.data
2:	String	"_mp_enable_preemption: preemption_level (%d) <= 0!"
	.text
1:
#endif	/* MACH_ASSERT */
	_ENABLE_PREEMPTION(%eax)
#endif	/* MACH_RT && NCPUS > 1 */
	ret

/*
 * void _mp_enable_preemption_no_check(void)
 *
 * MP-only _NO_CHECK preemption enable; compiles away on UP or
 * non-MACH_RT.  Under MACH_ASSERT, panic on a non-positive level.
 */
ENTRY(_mp_enable_preemption_no_check)
#if	MACH_RT && NCPUS > 1
#if	MACH_ASSERT
	movl	$CPD_PREEMPTION_LEVEL,%eax
	cmpl	$0,%gs:(%eax)		/* per-CPU preemption level */
	jg	1f			/* positive: OK to drop it */
	pushl	$2f
	call	EXT(panic)
	hlt
	.data
2:	String	"_mp_enable_preemption_no_check: preemption_level <= 0!"
	.text
1:
#endif	/* MACH_ASSERT */
	_ENABLE_PREEMPTION_NO_CHECK(%eax)
#endif	/* MACH_RT && NCPUS > 1 */
	ret

773 | ||
774 | ||
/*
 * i_bit_set(bit, addr)
 *
 * Atomically set bit number S_ARG0 in the word at S_ARG1.
 * NOTE(review): "bts %dl" uses a byte register as the bit index; bts
 * normally takes a 16/32-bit register operand -- confirm the target
 * assembler accepts this encoding.
 */
ENTRY(i_bit_set)
	movl	S_ARG1,%eax		/* %eax = word address */
	movl	S_ARG0,%edx		/* %edx = bit number */
	lock
	bts	%dl,(%eax)		/* atomic read-modify-write */
	ret

/*
 * i_bit_clear(bit, addr)
 *
 * Atomically clear bit number S_ARG0 in the word at S_ARG1.
 * NOTE(review): "btr %dl" uses a byte register as the bit index; btr
 * normally takes a 16/32-bit register operand -- confirm the target
 * assembler accepts this encoding.
 */
ENTRY(i_bit_clear)
	movl	S_ARG1,%eax		/* %eax = word address */
	movl	S_ARG0,%edx		/* %edx = bit number */
	lock
	btr	%dl,(%eax)		/* atomic read-modify-write */
	ret

/*
 * bit_lock(bit, addr)
 *
 * Spin until bit number S_ARG0 in the word at S_ARG1 could be
 * atomically changed from clear to set by this caller.
 */
ENTRY(bit_lock)
	movl	S_ARG1,%eax		/* %eax = word address */
	movl	S_ARG0,%ecx		/* %ecx = bit number */
1:
	lock
	bts	%ecx,(%eax)		/* atomically test-and-set the bit */
	jb	1b			/* already set: holder active, retry */
	ret

/*
 * bit_lock_try(bit, addr)
 *
 * One atomic test-and-set attempt on bit S_ARG0 of the word at
 * S_ARG1.  On success %eax still holds the (non-null) word address,
 * i.e. a truthy value; on failure %eax is zero.
 */
ENTRY(bit_lock_try)
	movl	S_ARG0,%ecx		/* %ecx = bit number */
	movl	S_ARG1,%eax		/* %eax = word address */
	lock
	bts	%ecx,(%eax)		/* atomically test-and-set the bit */
	jnc	1f			/* was clear: we got it */
	xorl	%eax,%eax		/* was set: return 0 (failure) */
1:	ret				/* success: %eax better not be null ! */

/*
 * bit_unlock(bit, addr)
 *
 * Atomically clear bit number S_ARG0 in the word at S_ARG1,
 * releasing a lock taken with bit_lock/bit_lock_try.
 */
ENTRY(bit_unlock)
	movl	S_ARG1,%eax		/* %eax = word address */
	movl	S_ARG0,%ecx		/* %ecx = bit number */
	lock
	btr	%ecx,(%eax)		/* atomic read-modify-write */
	ret