/*
 * Copyright (c) 2011 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
// OSAtomic.h is included by files that include this C file.
/* #include "OSAtomic.h" */
/*
 * OSSpinLock: 32-bit lock word. The code below stores 1 to acquire and
 * treats any nonzero value as "held" (see the *lock == 0 / slowpath(r)
 * checks), so 0 is the unlocked state.
 */
27 typedef int32_t OSSpinLock
;
/*
 * Bear with me, there's method to this madness. clang actually produces more
 * optimal fastpath code if the lock code is split in half like this.
 *
 * Once we're spinning in userspace/kernel like this then we can be more
 * lenient about how much effort is spent doing the spin. Where the lock is
 * uncontended, this split arrangement produces a very good fastpath at the
 * cost of some code duplication.
 *
 * Similarly, the gotos produce less wasteful code during spins than the
 * equivalent code.
 */
41 #import <mach/mach_traps.h>
/*
 * _OSSpinLockSlow: contended path taken by OSSpinLockLock when the lock word
 * is observed nonzero. Spins a bounded number of times, then yields to the
 * kernel via a thread-switch trap before retrying the exclusive-access
 * acquire sequence.
 *
 * NOTE(review): this extract is missing lines (function braces, the loop
 * opener(s), and the _spin/_try_store labels referenced by the gotos are not
 * visible here); comments below describe only the visible statements.
 */
43 static inline void _OSSpinLockSlow(volatile OSSpinLock
* lock
) _OSATOMIC_VARIANT(_OSSpinLockSlow
);
44 static inline void _OSSpinLockSlow(volatile OSSpinLock
* lock
)
/* Bounded-spin variant: ARMv7 build with barriers enabled (SMP config). */
50 #if (defined(_ARM_ARCH_7) && !defined(_OSATOMIC_NO_BARRIERS))
/* Budget of plain-load polls before giving up and trapping to the kernel. */
51 uint32_t tries
= MP_SPIN_TRIES
;
/* Cheap non-exclusive peek: only attempt the LDREX/STREX path once the
 * lock word reads 0. (_try_store label itself is not in this extract.) */
53 if (*lock
== 0) goto _try_store
;
/* Spin budget exhausted: yield via the Mach thread-switch trap so the lock
 * holder can run. The three immediates are loaded into argument registers
 * before the call — presumably (thread, option, option_time) for
 * syscall_thread_switch; TODO confirm against mach/thread_switch.h.
 * Caller-saved registers clobbered by the call are listed for the compiler. */
58 __asm__ ("mov r0, %[_a] ;"
61 "bl _syscall_thread_switch ;"
62 : : [_a
] "i" (0), [_b
] "i" (1), [_c
] "i" (1)
63 : "r0", "r1", "r2", "r9", "r12", "lr" );
/* Exclusive load of the lock word into r (LDREX-style). */
67 _osatomic_load_exclusive(lock
, r
);
/* Still held (nonzero): go back to spinning. (_spin label not visible.) */
68 if (slowpath(r
)) goto _spin
;
/* Exclusive store of 1; t is the STREX status — nonzero means the
 * reservation was lost and the pair must be retried. */
69 _osatomic_store_exclusive(lock
, 1, t
);
/* Tail of the (not-visible) do-loop: retry until the store succeeds. */
70 } while (slowpath(t
));
/*
 * OSSpinLockLock: acquire the spinlock, blocking (by spinning/yielding in
 * _OSSpinLockSlow) until it is available.
 *
 * Fast path: one exclusive load/store pair; if the lock word is already
 * nonzero, all contention handling is delegated to _OSSpinLockSlow so the
 * uncontended path stays short (see the rationale comment above).
 *
 * NOTE(review): the function's opening brace and the do-loop opener are not
 * visible in this extract; only the visible statements are annotated.
 */
75 void OSSpinLockLock(volatile OSSpinLock
* lock
) _OSATOMIC_VARIANT(OSSpinLockLock
);
76 void OSSpinLockLock(volatile OSSpinLock
* lock
)
/* Export the historic Libc symbol names as aliases of this function. */
78 _OSATOMIC_ALIAS(spin_lock
, OSSpinLockLock
);
79 _OSATOMIC_ALIAS(_spin_lock
, OSSpinLockLock
);
/* Exclusive load of the lock word into r. */
85 _osatomic_load_exclusive(lock
, r
);
/* Lock observed held (nonzero): take the contended slow path and return. */
86 if (slowpath(r
)) return _OSSpinLockSlow(lock
);
/* Exclusive store of 1; t nonzero means the reservation was lost. */
87 _osatomic_store_exclusive(lock
, 1, t
);
/* Tail of the (not-visible) do-loop: retry the pair until the store lands. */
88 } while (slowpath(t
));
/*
 * OSSpinLockTry: attempt to acquire the spinlock without blocking.
 * Returns false immediately if the lock word is observed nonzero; otherwise
 * retries the exclusive load/store pair until the store of 1 succeeds.
 * (The visible code implies a `return true` after the loop, but that line is
 * not present in this extract — confirm against the full source.)
 *
 * NOTE(review): the function's opening brace and the do-loop opener are not
 * visible in this extract; only the visible statements are annotated.
 */
95 bool OSSpinLockTry(OSSpinLock
* lock
) _OSATOMIC_VARIANT(OSSpinLockTry
);
96 bool OSSpinLockTry(OSSpinLock
* lock
)
/* Export the historic Libc symbol names as aliases of this function. */
98 _OSATOMIC_ALIAS(spin_lock_try
, OSSpinLockTry
);
99 _OSATOMIC_ALIAS(_spin_lock_try
, OSSpinLockTry
);
/* Exclusive load of the lock word into r. */
105 _osatomic_load_exclusive(lock
, r
);
/* Already held: fail fast — try-lock never spins on a held lock. */
106 if (slowpath(r
)) return false;
/* Exclusive store of 1; t nonzero means the reservation was lost. */
107 _osatomic_store_exclusive(lock
, 1, t
);
/* Tail of the (not-visible) do-loop: retry the pair until the store lands. */
108 } while (slowpath(t
));
/*
 * OSSpinLockUnlock: release the spinlock.
 *
 * NOTE(review): only the declaration and symbol aliases are visible in this
 * extract — the body (presumably a release barrier plus a store of 0 to
 * *lock, matching the acquire side above) is missing from the scrape; verify
 * against the full source before relying on any such description.
 */
114 void OSSpinLockUnlock(OSSpinLock
* lock
) _OSATOMIC_VARIANT(OSSpinLockUnlock
);
115 void OSSpinLockUnlock(OSSpinLock
* lock
)
/* Export the historic Libc symbol names as aliases of this function. */
117 _OSATOMIC_ALIAS(spin_unlock
, OSSpinLockUnlock
);
118 _OSATOMIC_ALIAS(_spin_unlock
, OSSpinLockUnlock
);
124 #endif // _OSATOMIC_WFE