/*
 * Copyright (c) 2000 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * Copyright (c) 1999-2003 Apple Computer, Inc.  All Rights Reserved.
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
/*
 * @OSF_COPYRIGHT@
 */
/*
 * Mach Operating System
 * Copyright (c) 1989 Carnegie-Mellon University
 * All rights reserved.  The CMU software License Agreement specifies
 * the terms and conditions for use and redistribution.
 */

#include <cpus.h>
#include <mach_rt.h>
#include <platforms.h>
#include <mach_ldebug.h>
#include <i386/asm.h>
#include <kern/etap_options.h>

#include "assym.s"

/*
 *	When performance isn't the only concern, it's
 *	nice to build stack frames...
 */
#define	BUILD_STACK_FRAMES	((MACH_LDEBUG || ETAP_LOCK_TRACE) && MACH_KDB)

#if	BUILD_STACK_FRAMES

#define	L_PC		4(%ebp)
#define	L_ARG0		8(%ebp)
#define	L_ARG1		12(%ebp)

#define	SWT_HI		-4(%ebp)
#define	SWT_LO		-8(%ebp)
#define	MISSED		-12(%ebp)

#else	/* BUILD_STACK_FRAMES */

#undef	FRAME
#undef	EMARF
#define	FRAME
#define	EMARF
#define	L_PC		(%esp)
#define	L_ARG0		4(%esp)
#define	L_ARG1		8(%esp)

#endif	/* BUILD_STACK_FRAMES */


#define	M_ILK		(%edx)
#define	M_LOCKED	1(%edx)
#define	M_WAITERS	2(%edx)
#define	M_PROMOTED_PRI	4(%edx)
#if	MACH_LDEBUG
#define	M_TYPE		6(%edx)
#define	M_PC		10(%edx)
#define	M_THREAD	14(%edx)
#endif	/* MACH_LDEBUG */

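/*
 * For orientation only: the M_* offsets above assume a mutex whose first
 * bytes are laid out roughly as in the hypothetical C sketch below.  The
 * field names and the exact packing are illustrative, not taken from this
 * file; the authoritative offsets come from assym.s and the C definition
 * of the mutex.  (Note the 32-bit debug fields sit at offsets 6/10/14, so
 * a literal C equivalent would have to be a packed structure.)
 *
 *	struct mutex_layout {			// assumed, for illustration
 *		unsigned char	ilk;		//  0: hardware interlock  (M_ILK)
 *		unsigned char	locked;		//  1: mutex held flag     (M_LOCKED)
 *		unsigned short	waiters;	//  2: count of waiters    (M_WAITERS)
 *		unsigned short	promoted_pri;	//  4: promoted priority   (M_PROMOTED_PRI)
 *	#if MACH_LDEBUG
 *		unsigned int	type;		//  6: lock type tag       (M_TYPE)
 *		unsigned int	pc;		// 10: caller pc           (M_PC)
 *		unsigned int	thread;		// 14: owning thread       (M_THREAD)
 *	#endif
 *	};
 */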
#include <i386/AT386/mp/mp.h>
#if	(NCPUS > 1)
#define	CX(addr,reg)	addr(,reg,4)
#else
#define	CPU_NUMBER(reg)
#define	CX(addr,reg)	addr
#endif	/* (NCPUS > 1) */

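/*
 * Illustrative expansion (not part of the original source): on a
 * multiprocessor build, CX() turns a symbol plus a CPU number held in a
 * register into a scaled-index operand for a per-CPU array of 32-bit
 * counters, e.g.
 *
 *	incl	CX(EXT(simple_lock_count),%eax)
 * expands to
 *	incl	EXT(simple_lock_count)(,%eax,4)
 *
 * i.e. simple_lock_count[cpu]++ with a 4-byte element size.  On a
 * uniprocessor build both CPU_NUMBER() and the scaling disappear.
 */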
#if	MACH_LDEBUG
/*
 *  Routines for general lock debugging.
 */
#define	S_TYPE		4(%edx)
#define	S_PC		8(%edx)
#define	S_THREAD	12(%edx)
#define	S_DURATIONH	16(%edx)
#define	S_DURATIONL	20(%edx)

/*
 *  Checks for expected lock types and calls "panic" on
 *  mismatch.  Detects calls to Mutex functions with
 *  type simplelock and vice versa.
 */
#define	CHECK_MUTEX_TYPE()					\
	cmpl	$ MUTEX_TAG,M_TYPE			;	\
	je	1f					;	\
	pushl	$2f					;	\
	call	EXT(panic)				;	\
	hlt						;	\
	.data						;	\
2:	String	"not a mutex!"				;	\
	.text						;	\
1:

#define	CHECK_SIMPLE_LOCK_TYPE()				\
	cmpl	$ SIMPLE_LOCK_TAG,S_TYPE		;	\
	je	1f					;	\
	pushl	$2f					;	\
	call	EXT(panic)				;	\
	hlt						;	\
	.data						;	\
2:	String	"not a simple lock!"			;	\
	.text						;	\
1:

/*
 *  If one or more simplelocks are currently held by a thread,
 *  an attempt to acquire a mutex will cause this check to fail
 *  (since a mutex lock may context switch, holding a simplelock
 *  is not a good thing).
 */
#if	0 /*MACH_RT - 11/12/99 - lion@apple.com disable check for now*/
#define	CHECK_PREEMPTION_LEVEL()				\
	movl	$ CPD_PREEMPTION_LEVEL,%eax		;	\
	cmpl	$0,%gs:(%eax)				;	\
	je	1f					;	\
	pushl	$2f					;	\
	call	EXT(panic)				;	\
	hlt						;	\
	.data						;	\
2:	String	"preemption_level != 0!"		;	\
	.text						;	\
1:
#else	/* MACH_RT */
#define	CHECK_PREEMPTION_LEVEL()
#endif	/* MACH_RT */

#define	CHECK_NO_SIMPLELOCKS()					\
	movl	$ CPD_SIMPLE_LOCK_COUNT,%eax		;	\
	cmpl	$0,%gs:(%eax)				;	\
	je	1f					;	\
	pushl	$2f					;	\
	call	EXT(panic)				;	\
	hlt						;	\
	.data						;	\
2:	String	"simple_locks_held!"			;	\
	.text						;	\
1:

/*
 * Verifies return to the correct thread in "unlock" situations.
 */
#define	CHECK_THREAD(thd)					\
	movl	$ CPD_ACTIVE_THREAD,%eax		;	\
	movl	%gs:(%eax),%ecx				;	\
	testl	%ecx,%ecx				;	\
	je	1f					;	\
	cmpl	%ecx,thd				;	\
	je	1f					;	\
	pushl	$2f					;	\
	call	EXT(panic)				;	\
	hlt						;	\
	.data						;	\
2:	String	"wrong thread!"				;	\
	.text						;	\
1:

#define	CHECK_MYLOCK(thd)					\
	movl	$ CPD_ACTIVE_THREAD,%eax		;	\
	movl	%gs:(%eax),%ecx				;	\
	testl	%ecx,%ecx				;	\
	je	1f					;	\
	cmpl	%ecx,thd				;	\
	jne	1f					;	\
	pushl	$2f					;	\
	call	EXT(panic)				;	\
	hlt						;	\
	.data						;	\
2:	String	"mylock attempt!"			;	\
	.text						;	\
1:

#define	METER_SIMPLE_LOCK_LOCK(reg)				\
	pushl	reg					;	\
	call	EXT(meter_simple_lock)			;	\
	popl	reg

#define	METER_SIMPLE_LOCK_UNLOCK(reg)				\
	pushl	reg					;	\
	call	EXT(meter_simple_unlock)		;	\
	popl	reg

#else	/* MACH_LDEBUG */
#define	CHECK_MUTEX_TYPE()
#define	CHECK_SIMPLE_LOCK_TYPE()
#define	CHECK_THREAD(thd)
#define	CHECK_PREEMPTION_LEVEL()
#define	CHECK_NO_SIMPLELOCKS()
#define	CHECK_MYLOCK(thd)
#define	METER_SIMPLE_LOCK_LOCK(reg)
#define	METER_SIMPLE_LOCK_UNLOCK(reg)
#endif	/* MACH_LDEBUG */


/*
 *	void hw_lock_init(hw_lock_t)
 *
 *	Initialize a hardware lock.
 */
ENTRY(hw_lock_init)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */
	xorl	%eax,%eax
	movb	%al,0(%edx)		/* clear the lock */
	EMARF
	ret

/*
 *	void hw_lock_lock(hw_lock_t)
 *	unsigned int hw_lock_to(hw_lock_t, unsigned int)
 *
 *	Acquire lock, spinning until it becomes available.
 *	XXX: For now, we don't actually implement the timeout.
 *	MACH_RT: also return with preemption disabled.
 */
ENTRY2(hw_lock_lock,hw_lock_to)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

1:	DISABLE_PREEMPTION(%eax)
	movb	$1,%cl
	xchgb	0(%edx),%cl		/* try to acquire the HW lock */
	testb	%cl,%cl			/* success? */
	jne	3f
	movl	$1,%eax			/* In case this was a timeout call */
	EMARF				/* if yes, then nothing left to do */
	ret

3:	ENABLE_PREEMPTION(%eax)		/* no reason we can't be preemptable now */

	movb	$1,%cl
2:	testb	%cl,0(%edx)		/* spin checking lock value in cache */
	jne	2b			/* non-zero means locked, keep spinning */
	jmp	1b			/* zero means unlocked, try to grab it */

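/*
 * A rough C rendering of the acquisition loop above (illustrative only;
 * the lock is assumed to be a byte-sized lock word, and
 * disable_preemption()/enable_preemption() stand in for the
 * DISABLE_PREEMPTION/ENABLE_PREEMPTION macros):
 *
 *	void hw_lock_lock_sketch(volatile unsigned char *lock)
 *	{
 *		for (;;) {
 *			disable_preemption();
 *			if (__sync_lock_test_and_set(lock, 1) == 0)
 *				return;			// got it; preemption stays off
 *			enable_preemption();		// back off while we spin
 *			while (*lock != 0)
 *				continue;		// spin on the cached value
 *		}
 *	}
 *
 * Spinning on a plain read keeps the lock's cache line shared until it is
 * released, so the xchgb (which needs the line exclusive) is only retried
 * once the lock looks free.
 */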
/*
 *	void hw_lock_unlock(hw_lock_t)
 *
 *	Unconditionally release lock.
 *	MACH_RT: release preemption level.
 */
ENTRY(hw_lock_unlock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */
	xorl	%eax,%eax
	xchgb	0(%edx),%al		/* clear the lock... a mov instruction */
					/* ...might be cheaper and less paranoid */
	ENABLE_PREEMPTION(%eax)
	EMARF
	ret

/*
 *	unsigned int hw_lock_try(hw_lock_t)
 *	MACH_RT: returns with preemption disabled on success.
 */
ENTRY(hw_lock_try)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	DISABLE_PREEMPTION(%eax)
	movb	$1,%cl
	xchgb	0(%edx),%cl		/* try to acquire the HW lock */
	testb	%cl,%cl			/* success? */
	jne	1f			/* if yes, let the caller know */

	movl	$1,%eax			/* success */
	EMARF
	ret

1:	ENABLE_PREEMPTION(%eax)		/* failure: release preemption... */
	xorl	%eax,%eax		/* ...and return failure */
	EMARF
	ret

/*
 *	unsigned int hw_lock_held(hw_lock_t)
 *	MACH_RT: doesn't change preemption state.
 *	N.B. Racy, of course.
 */
ENTRY(hw_lock_held)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	movb	$1,%cl
	testb	%cl,0(%edx)		/* check lock value */
	jne	1f			/* non-zero means locked */
	xorl	%eax,%eax		/* tell caller: lock wasn't locked */
	EMARF
	ret

1:	movl	$1,%eax			/* tell caller: lock was locked */
	EMARF
	ret

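/*
 * Typical (hypothetical) caller pattern for the try/unlock pair above,
 * written as C for illustration; the real callers live elsewhere in the
 * kernel:
 *
 *	if (hw_lock_try(&lock)) {
 *		// critical section: preemption is disabled here,
 *		// so keep the work short
 *		hw_lock_unlock(&lock);	// also drops the preemption level
 *	} else {
 *		// lock busy: caller must back off or do something else
 *	}
 *
 * hw_lock_held() only samples the lock byte and is advisory; by the time
 * the caller looks at the result the lock may already have changed hands.
 */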


#if	0


ENTRY(_usimple_lock_init)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */
	xorl	%eax,%eax
	movb	%al,USL_INTERLOCK(%edx)	/* unlock the HW lock */
	EMARF
	ret

ENTRY(_simple_lock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_SIMPLE_LOCK_TYPE()

	DISABLE_PREEMPTION(%eax)

sl_get_hw:
	movb	$1,%cl
	xchgb	USL_INTERLOCK(%edx),%cl	/* try to acquire the HW lock */
	testb	%cl,%cl			/* did we succeed? */

#if	MACH_LDEBUG
	je	5f
	CHECK_MYLOCK(S_THREAD)
	jmp	sl_get_hw
5:
#else	/* MACH_LDEBUG */
	jne	sl_get_hw		/* no, try again */
#endif	/* MACH_LDEBUG */

#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,S_PC
	movl	$ CPD_ACTIVE_THREAD,%eax
	movl	%gs:(%eax),%ecx
	movl	%ecx,S_THREAD
	incl	CX(EXT(simple_lock_count),%eax)
#if 0
	METER_SIMPLE_LOCK_LOCK(%edx)
#endif
#if	NCPUS == 1
	pushf
	pushl	%edx
	cli
	call	EXT(lock_stack_push)
	popl	%edx
	popfl
#endif	/* NCPUS == 1 */
#endif	/* MACH_LDEBUG */

	EMARF
	ret

ENTRY(_simple_lock_try)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_SIMPLE_LOCK_TYPE()

	DISABLE_PREEMPTION(%eax)

	movb	$1,%cl
	xchgb	USL_INTERLOCK(%edx),%cl	/* try to acquire the HW lock */
	testb	%cl,%cl			/* did we succeed? */
	jne	1f			/* no, return failure */

#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,S_PC
	movl	$ CPD_ACTIVE_THREAD,%eax
	movl	%gs:(%eax),%ecx
	movl	%ecx,S_THREAD
	incl	CX(EXT(simple_lock_count),%eax)
#if 0
	METER_SIMPLE_LOCK_LOCK(%edx)
#endif
#if	NCPUS == 1
	pushf
	pushl	%edx
	cli
	call	EXT(lock_stack_push)
	popl	%edx
	popfl
#endif	/* NCPUS == 1 */
#endif	/* MACH_LDEBUG */

	movl	$1,%eax			/* return success */

	EMARF
	ret

1:
	ENABLE_PREEMPTION(%eax)

	xorl	%eax,%eax		/* and return failure */

	EMARF
	ret

ENTRY(_simple_unlock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_SIMPLE_LOCK_TYPE()
	CHECK_THREAD(S_THREAD)

#if	MACH_LDEBUG
	xorl	%eax,%eax
	movl	%eax,S_THREAD		/* disown thread */
	MP_DISABLE_PREEMPTION(%eax)
	CPU_NUMBER(%eax)
	decl	CX(EXT(simple_lock_count),%eax)
	MP_ENABLE_PREEMPTION(%eax)
#if 0
	METER_SIMPLE_LOCK_UNLOCK(%edx)
#endif
#if	NCPUS == 1
	pushf
	pushl	%edx
	cli
	call	EXT(lock_stack_pop)
	popl	%edx
	popfl
#endif	/* NCPUS == 1 */
#endif	/* MACH_LDEBUG */

	xorb	%cl,%cl
	xchgb	USL_INTERLOCK(%edx),%cl	/* unlock the HW lock */

	ENABLE_PREEMPTION(%eax)

	EMARF
	ret

#endif	/* 0 */


ENTRY(mutex_init)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */
	xorl	%eax,%eax
	movb	%al,M_ILK		/* clear interlock */
	movb	%al,M_LOCKED		/* clear locked flag */
	movw	%ax,M_WAITERS		/* init waiter count */
	movw	%ax,M_PROMOTED_PRI

#if	MACH_LDEBUG
	movl	$ MUTEX_TAG,M_TYPE	/* set lock type */
	movl	%eax,M_PC		/* init caller pc */
	movl	%eax,M_THREAD		/* and owning thread */
#endif
#if	ETAP_LOCK_TRACE
	movl	L_ARG1,%ecx		/* fetch event type */
	pushl	%ecx			/* push event type */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_init)	/* init ETAP data */
	addl	$8,%esp
#endif	/* ETAP_LOCK_TRACE */

	EMARF
	ret

ENTRY2(mutex_lock,_mutex_lock)
	FRAME

#if	ETAP_LOCK_TRACE
	subl	$12,%esp		/* make room for locals */
	movl	$0,SWT_HI		/* set wait time to zero (HI) */
	movl	$0,SWT_LO		/* set wait time to zero (LO) */
	movl	$0,MISSED		/* clear local miss marker */
#endif	/* ETAP_LOCK_TRACE */

	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_MUTEX_TYPE()
	CHECK_NO_SIMPLELOCKS()
	CHECK_PREEMPTION_LEVEL()

ml_retry:
	DISABLE_PREEMPTION(%eax)

ml_get_hw:
	movb	$1,%cl
	xchgb	%cl,M_ILK
	testb	%cl,%cl			/* did we succeed? */
	jne	ml_get_hw		/* no, try again */

	movb	$1,%cl
	xchgb	%cl,M_LOCKED		/* try to set locked flag */
	testb	%cl,%cl			/* is the mutex locked? */
	jne	ml_fail			/* yes, we lose */

	pushl	%edx
	call	EXT(mutex_lock_acquire)
	addl	$4,%esp
	movl	L_ARG0,%edx

#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,M_PC
	movl	$ CPD_ACTIVE_THREAD,%eax
	movl	%gs:(%eax),%ecx
	movl	%ecx,M_THREAD
	testl	%ecx,%ecx
	je	3f
	incl	TH_MUTEX_COUNT(%ecx)
3:
#endif

	xorb	%cl,%cl
	xchgb	%cl,M_ILK

	ENABLE_PREEMPTION(%eax)

#if	ETAP_LOCK_TRACE
	movl	L_PC,%eax		/* fetch pc */
	pushl	SWT_LO			/* push wait time (low) */
	pushl	SWT_HI			/* push wait time (high) */
	pushl	%eax			/* push pc */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_hold)	/* collect hold timestamp */
	addl	$16+12,%esp		/* clean up stack, adjusting for locals */
#endif	/* ETAP_LOCK_TRACE */

	EMARF
	ret

ml_fail:
#if	ETAP_LOCK_TRACE
	cmp	$0,MISSED		/* did we already take a wait timestamp? */
	jne	ml_block		/* yup. carry-on */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_miss)	/* get wait timestamp */
	movl	%eax,SWT_HI		/* set wait time (high word) */
	movl	%edx,SWT_LO		/* set wait time (low word) */
	popl	%edx			/* clean up stack */
	movl	$1,MISSED		/* mark wait timestamp as taken */
#endif	/* ETAP_LOCK_TRACE */

ml_block:
	CHECK_MYLOCK(M_THREAD)
	xorl	%eax,%eax
	pushl	%eax			/* no promotion here yet */
	pushl	%edx			/* push mutex address */
	call	EXT(mutex_lock_wait)	/* wait for the lock */
	addl	$8,%esp
	movl	L_ARG0,%edx		/* refetch lock pointer */
	jmp	ml_retry		/* and try again */

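/*
 * The control flow above, restated as hedged C pseudocode.  Names such as
 * mutex_lock_sketch and test_and_set() are invented for illustration
 * (test_and_set() stands in for the xchgb-based atomic swap);
 * mutex_lock_acquire() and mutex_lock_wait() are the external C helpers
 * actually called by the assembly:
 *
 *	void mutex_lock_sketch(struct mutex_layout *m)
 *	{
 *	retry:
 *		disable_preemption();
 *		while (test_and_set(&m->ilk))		// ml_get_hw: spin for interlock
 *			continue;
 *		if (test_and_set(&m->locked)) {		// already locked?
 *			mutex_lock_wait(m, 0);		// ml_block: 0 = no promotion yet
 *			goto retry;			// ml_retry
 *		}
 *		mutex_lock_acquire(m);			// record ownership
 *		m->ilk = 0;				// drop the interlock
 *		enable_preemption();
 *	}
 *
 * Note that on the blocking path the assembly calls mutex_lock_wait()
 * while still holding the interlock; releasing it (and fixing up the
 * preemption level) is presumably the callee's job, cf. interlock_unlock
 * further below.
 */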
ENTRY2(mutex_try,_mutex_try)
	FRAME

#if	ETAP_LOCK_TRACE
	subl	$8,%esp			/* make room for locals */
	movl	$0,SWT_HI		/* set wait time to zero (HI) */
	movl	$0,SWT_LO		/* set wait time to zero (LO) */
#endif	/* ETAP_LOCK_TRACE */

	movl	L_ARG0,%edx		/* fetch lock pointer */

	CHECK_MUTEX_TYPE()
	CHECK_NO_SIMPLELOCKS()

	DISABLE_PREEMPTION(%eax)

mt_get_hw:
	movb	$1,%cl
	xchgb	%cl,M_ILK
	testb	%cl,%cl
	jne	mt_get_hw

	movb	$1,%cl
	xchgb	%cl,M_LOCKED
	testb	%cl,%cl
	jne	mt_fail

	pushl	%edx
	call	EXT(mutex_lock_acquire)
	addl	$4,%esp
	movl	L_ARG0,%edx

#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,M_PC
	movl	$ CPD_ACTIVE_THREAD,%ecx
	movl	%gs:(%ecx),%ecx
	movl	%ecx,M_THREAD
	testl	%ecx,%ecx
	je	1f
	incl	TH_MUTEX_COUNT(%ecx)
1:
#endif

	xorb	%cl,%cl
	xchgb	%cl,M_ILK

	ENABLE_PREEMPTION(%eax)

#if	ETAP_LOCK_TRACE
	movl	L_PC,%eax		/* fetch pc */
	pushl	SWT_LO			/* push wait time (low) */
	pushl	SWT_HI			/* push wait time (high) */
	pushl	%eax			/* push pc */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_hold)	/* get start hold timestamp */
	addl	$16,%esp		/* clean up stack, adjusting for locals */
#endif	/* ETAP_LOCK_TRACE */

	movl	$1,%eax

#if	MACH_LDEBUG || ETAP_LOCK_TRACE
#if	ETAP_LOCK_TRACE
	addl	$8,%esp			/* pop stack claimed on entry */
#endif
#endif

	EMARF
	ret

mt_fail:
#if	MACH_LDEBUG
	movl	L_PC,%ecx
	movl	%ecx,M_PC
	movl	$ CPD_ACTIVE_THREAD,%ecx
	movl	%gs:(%ecx),%ecx
	movl	%ecx,M_THREAD
	testl	%ecx,%ecx
	je	1f
	incl	TH_MUTEX_COUNT(%ecx)
1:
#endif

	xorb	%cl,%cl
	xchgb	%cl,M_ILK

	ENABLE_PREEMPTION(%eax)

#if	ETAP_LOCK_TRACE
	movl	L_PC,%eax		/* fetch pc */
	pushl	SWT_LO			/* push wait time (low) */
	pushl	SWT_HI			/* push wait time (high) */
	pushl	%eax			/* push pc */
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_hold)	/* get start hold timestamp */
	addl	$16,%esp		/* clean up stack, adjusting for locals */
#endif	/* ETAP_LOCK_TRACE */

	xorl	%eax,%eax

#if	MACH_LDEBUG || ETAP_LOCK_TRACE
#if	ETAP_LOCK_TRACE
	addl	$8,%esp			/* pop stack claimed on entry */
#endif
#endif

	EMARF
	ret

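/*
 * mutex_try is the non-blocking variant: the same interlock/locked-flag
 * sequence as mutex_lock, but when the mutex is already held it drops the
 * interlock, re-enables preemption and returns 0 instead of calling
 * mutex_lock_wait.  A hypothetical caller, in C:
 *
 *	if (mutex_try(m)) {
 *		// ... short critical section ...
 *		mutex_unlock(m);
 *	} else {
 *		// contended: fall back to mutex_lock(m) or defer the work
 *	}
 */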
ENTRY(mutex_unlock)
	FRAME
	movl	L_ARG0,%edx		/* fetch lock pointer */

#if	ETAP_LOCK_TRACE
	pushl	%edx			/* push mutex address */
	call	EXT(etap_mutex_unlock)	/* collect ETAP data */
	popl	%edx			/* restore mutex address */
#endif	/* ETAP_LOCK_TRACE */

	CHECK_MUTEX_TYPE()
	CHECK_THREAD(M_THREAD)

	DISABLE_PREEMPTION(%eax)

mu_get_hw:
	movb	$1,%cl
	xchgb	%cl,M_ILK
	testb	%cl,%cl			/* did we succeed? */
	jne	mu_get_hw		/* no, try again */

	cmpw	$0,M_WAITERS		/* are there any waiters? */
	jne	mu_wakeup		/* yes, more work to do */

mu_doit:
#if	MACH_LDEBUG
	xorl	%eax,%eax
	movl	%eax,M_THREAD		/* disown thread */
	movl	$ CPD_ACTIVE_THREAD,%eax
	movl	%gs:(%eax),%ecx
	testl	%ecx,%ecx
	je	0f
	decl	TH_MUTEX_COUNT(%ecx)
0:
#endif

	xorb	%cl,%cl
	xchgb	%cl,M_LOCKED		/* unlock the mutex */

	xorb	%cl,%cl
	xchgb	%cl,M_ILK

	ENABLE_PREEMPTION(%eax)

	EMARF
	ret

mu_wakeup:
	xorl	%eax,%eax
	pushl	%eax			/* no promotion here yet */
	pushl	%edx			/* push mutex address */
	call	EXT(mutex_unlock_wakeup)/* yes, wake a thread */
	addl	$8,%esp
	movl	L_ARG0,%edx		/* refetch lock pointer */
	jmp	mu_doit

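/*
 * Unlock path in hedged C pseudocode (illustrative names only; the real
 * work of choosing and waking a waiter is done by the external C helper
 * mutex_unlock_wakeup):
 *
 *	void mutex_unlock_sketch(struct mutex_layout *m)
 *	{
 *		disable_preemption();
 *		while (test_and_set(&m->ilk))		// mu_get_hw: grab the interlock
 *			continue;
 *		if (m->waiters != 0)
 *			mutex_unlock_wakeup(m, 0);	// 0 = no promotion here yet
 *		m->locked = 0;				// mu_doit: release the mutex...
 *		m->ilk = 0;				// ...and then the interlock
 *		enable_preemption();
 *	}
 */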
ENTRY(interlock_unlock)
	FRAME
	movl	L_ARG0,%edx

	xorb	%cl,%cl
	xchgb	%cl,M_ILK

	ENABLE_PREEMPTION(%eax)

	EMARF
	ret


ENTRY(_disable_preemption)
#if	MACH_RT
	_DISABLE_PREEMPTION(%eax)
#endif	/* MACH_RT */
	ret

ENTRY(_enable_preemption)
#if	MACH_RT
#if	MACH_ASSERT
	movl	$ CPD_PREEMPTION_LEVEL,%eax
	cmpl	$0,%gs:(%eax)
	jg	1f
	pushl	%gs:(%eax)
	pushl	$2f
	call	EXT(panic)
	hlt
	.data
2:	String	"_enable_preemption: preemption_level(%d) < 0!"
	.text
1:
#endif	/* MACH_ASSERT */
	_ENABLE_PREEMPTION(%eax)
#endif	/* MACH_RT */
	ret

ENTRY(_enable_preemption_no_check)
#if	MACH_RT
#if	MACH_ASSERT
	movl	$ CPD_PREEMPTION_LEVEL,%eax
	cmpl	$0,%gs:(%eax)
	jg	1f
	pushl	$2f
	call	EXT(panic)
	hlt
	.data
2:	String	"_enable_preemption_no_check: preemption_level <= 0!"
	.text
1:
#endif	/* MACH_ASSERT */
	_ENABLE_PREEMPTION_NO_CHECK(%eax)
#endif	/* MACH_RT */
	ret

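/*
 * Conceptually (a sketch only, not the actual bodies of the
 * _DISABLE/_ENABLE_PREEMPTION macros), these entry points maintain a
 * per-CPU nesting counter; the MACH_ASSERT checks above panic when that
 * counter would go negative.  cpu_data() and
 * check_for_pending_preemption() are hypothetical stand-ins for the
 * %gs-relative per-CPU area that CPD_PREEMPTION_LEVEL indexes:
 *
 *	void _disable_preemption_sketch(void)
 *	{
 *		cpu_data()->preemption_level++;
 *	}
 *
 *	void _enable_preemption_sketch(void)
 *	{
 *		assert(cpu_data()->preemption_level > 0);	// the panic above
 *		if (--cpu_data()->preemption_level == 0)
 *			check_for_pending_preemption();		// hypothetical hook
 *	}
 */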

ENTRY(_mp_disable_preemption)
#if	MACH_RT && NCPUS > 1
	_DISABLE_PREEMPTION(%eax)
#endif	/* MACH_RT && NCPUS > 1 */
	ret

ENTRY(_mp_enable_preemption)
#if	MACH_RT && NCPUS > 1
#if	MACH_ASSERT
	movl	$ CPD_PREEMPTION_LEVEL,%eax
	cmpl	$0,%gs:(%eax)
	jg	1f
	pushl	%gs:(%eax)
	pushl	$2f
	call	EXT(panic)
	hlt
	.data
2:	String	"_mp_enable_preemption: preemption_level (%d) <= 0!"
	.text
1:
#endif	/* MACH_ASSERT */
	_ENABLE_PREEMPTION(%eax)
#endif	/* MACH_RT && NCPUS > 1 */
	ret

ENTRY(_mp_enable_preemption_no_check)
#if	MACH_RT && NCPUS > 1
#if	MACH_ASSERT
	movl	$ CPD_PREEMPTION_LEVEL,%eax
	cmpl	$0,%gs:(%eax)
	jg	1f
	pushl	$2f
	call	EXT(panic)
	hlt
	.data
2:	String	"_mp_enable_preemption_no_check: preemption_level <= 0!"
	.text
1:
#endif	/* MACH_ASSERT */
	_ENABLE_PREEMPTION_NO_CHECK(%eax)
#endif	/* MACH_RT && NCPUS > 1 */
	ret


ENTRY(i_bit_set)
	movl	S_ARG0,%edx
	movl	S_ARG1,%eax
	lock
	bts	%dl,(%eax)
	ret

ENTRY(i_bit_clear)
	movl	S_ARG0,%edx
	movl	S_ARG1,%eax
	lock
	btr	%dl,(%eax)
	ret

ENTRY(bit_lock)
	movl	S_ARG0,%ecx
	movl	S_ARG1,%eax
1:
	lock
	bts	%ecx,(%eax)
	jb	1b
	ret

ENTRY(bit_lock_try)
	movl	S_ARG0,%ecx
	movl	S_ARG1,%eax
	lock
	bts	%ecx,(%eax)
	jb	bit_lock_failed
	ret			/* %eax better not be null ! */
bit_lock_failed:
	xorl	%eax,%eax
	ret

ENTRY(bit_unlock)
	movl	S_ARG0,%ecx
	movl	S_ARG1,%eax
	lock
	btr	%ecx,(%eax)
	ret
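
/*
 * The bit_lock family implements a spin lock on a single bit of a word.
 * A hedged C equivalent using compiler builtins (illustrative only; the
 * argument order matches the assembly above: bit number first, then the
 * address of the word):
 *
 *	void bit_lock_sketch(int bit, volatile long *word)
 *	{
 *		// lock bts; retry while the bit was already set
 *		while (__sync_fetch_and_or(word, 1L << bit) & (1L << bit))
 *			continue;
 *	}
 *
 *	void bit_unlock_sketch(int bit, volatile long *word)
 *	{
 *		__sync_fetch_and_and(word, ~(1L << bit));	// lock btr
 *	}
 *
 * bit_lock_try does the same test-and-set but returns immediately instead
 * of spinning; per the comment above, on success it returns with %eax
 * still holding the word address, which therefore must be non-NULL to
 * read as "true".
 */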