/*
 * Copyright (c) 2007 Apple Inc. All rights reserved.
 * Copyright (c) 2004-2006 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
24
25#include <machine/cpu_capabilities.h>
26#include <platfunc.h>
27#include <architecture/i386/asm_help.h>
28
29#define ATOMIC_UP 0
30#define ATOMIC_MP 1
31#define ATOMIC_RET_ORIG 0
32#define ATOMIC_RET_NEW 1
33
34// compare and exchange 32-bit
35// xchg32 <new> <dst> <mp>
36.macro xchg32
37 .if $2 == ATOMIC_MP
38 lock
39 .endif
40 cmpxchgl $0, ($1)
41.endm
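
/*
 * xchg32 expands to an optionally-locked "cmpxchgl <new>, (<dst>)": it
 * compares %eax with the word at (<dst>) and, if equal, stores <new>
 * there; ZF reports success, and %eax always ends up holding the old
 * memory value. A rough C model of one expansion (an illustrative
 * sketch, not part of this file):
 *
 *	#include <stdbool.h>
 *	#include <stdint.h>
 *
 *	static bool cas32(int32_t old, int32_t new, volatile int32_t *dst)
 *	{
 *		// GCC builtin; compiles to (lock) cmpxchgl on i386
 *		return __sync_bool_compare_and_swap(dst, old, new);
 *	}
 */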

// compare and exchange 64-bit
// xchg64 <dst> <mp>
.macro xchg64
	.if $1 == ATOMIC_MP
	lock
	.endif
	cmpxchg8b ($0)
.endm
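
/*
 * cmpxchg8b compares %edx:%eax against the 8 bytes at (<dst>); on a match
 * it stores %ecx:%ebx there, otherwise it loads the current value into
 * %edx:%eax. ZF again reports success. Equivalent C sketch (illustrative
 * only, not part of this file):
 *
 *	static bool cas64(int64_t old, int64_t new, volatile int64_t *dst)
 *	{
 *		// compiles to (lock) cmpxchg8b on i386
 *		return __sync_bool_compare_and_swap(dst, old, new);
 *	}
 */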

// int32_t OSAtomicAnd32(uint32_t theMask, volatile uint32_t *theValue);
// (likewise Or32, Xor32, and their Orig/Barrier variants, which all share this loop)
#define ATOMIC_ARITHMETIC(instr, orig, mp) \
	movl	8(%esp), %ecx	/* load ptr (2nd arg) into ecx */ ;\
	movl	(%ecx), %eax	/* load current value into eax */ ;\
1:	movl	4(%esp), %edx	/* load 1st arg into edx */ ;\
	instr	%eax, %edx	/* do the operation */ ;\
	xchg32	%edx, %ecx, mp	/* old in %eax, new in %edx, exchange into (%ecx) */ ;\
	jnz	1b		/* go back if we failed to exchange */ ;\
	.if orig == ATOMIC_RET_NEW ;\
	movl	%edx, %eax	/* return new value */ ;\
	.endif
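
/*
 * The loop above in C (an illustrative sketch, where "op" stands for the
 * andl/orl/xorl instruction the macro substitutes). Note that on failure
 * cmpxchgl leaves the fresh memory value in %eax, so no explicit reload
 * is needed before retrying:
 *
 *	int32_t atomic_rmw32(int32_t arg, volatile int32_t *p)
 *	{
 *		int32_t old = *p, seen;
 *		for (;;) {
 *			int32_t new = old op arg;
 *			seen = __sync_val_compare_and_swap(p, old, new);
 *			if (seen == old)
 *				return new;	// ...Orig variants return old
 *			old = seen;	// as cmpxchg leaves it in %eax
 *		}
 *	}
 */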

// bool OSAtomicTestAndSet(uint32_t n, volatile void *theAddress);
#define ATOMIC_BIT_OP(instr, mp) \
	movl	4(%esp), %eax	;\
	movl	8(%esp), %edx	;\
	shldl	$3,%edx,%ecx	/* save top 3 bits of address in %ecx */ ;\
	shll	$3,%edx		;\
	xorl	$7,%eax		/* bit position is numbered big endian so convert to little endian */ ;\
	addl	%eax,%edx	/* generate bit address */ ;\
	adcl	$0,%ecx		/* handle carry out of lower half of address */ ;\
	movl	%edx,%eax	/* copy lower half of bit address */ ;\
	andl	$31,%eax	/* keep bit offset in range 0..31 */ ;\
	xorl	%eax,%edx	/* 4-byte align address */ ;\
	shrdl	$3,%ecx,%edx	/* restore 32-bit byte address in %edx */ ;\
	.if mp == ATOMIC_MP	;\
	lock			;\
	.endif			;\
	instr	%eax, (%edx)	;\
	setc	%al		;\
	movzbl	%al,%eax	// widen in case caller assumes we return an int
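
/*
 * Bit numbering here is big-endian within each byte: bit 0 is the 0x80
 * bit of the first byte at theAddress, bit 7 is its 0x01 bit, bit 8 is
 * the 0x80 bit of the next byte, and so on; the xorl $7 above performs
 * the conversion. Equivalent C sketch (illustrative only, not part of
 * this file):
 *
 *	static bool test_and_set(uint32_t n, volatile void *addr)
 *	{
 *		volatile uint8_t *byte = (volatile uint8_t *)addr + (n >> 3);
 *		uint8_t mask = 0x80 >> (n & 7);		// big-endian bit
 *		uint8_t old = __sync_fetch_and_or(byte, mask);
 *		return (old & mask) != 0;		// previous bit value
 *	}
 */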

// int64_t OSAtomicAdd64(int64_t theAmount, volatile int64_t *theValue);
#define ATOMIC_ADD64(mp) \
	pushl	%ebx		;\
	pushl	%esi		;\
	movl	20(%esp), %esi	;\
	movl	0(%esi), %eax	;\
	movl	4(%esi), %edx	;\
1:	movl	12(%esp), %ebx	;\
	movl	16(%esp), %ecx	;\
	addl	%eax, %ebx	;\
	adcl	%edx, %ecx	;\
	xchg64	%esi, mp	;\
	jnz	1b		;\
	movl	%ebx, %eax	;\
	movl	%ecx, %edx	;\
	popl	%esi		;\
	popl	%ebx
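
/*
 * The same CAS loop widened to 64 bits: cmpxchg8b requires the old value
 * in %edx:%eax and the new value in %ecx:%ebx, which is why %ebx and %esi
 * must be saved first. C shape of the loop (illustrative sketch, not part
 * of this file):
 *
 *	int64_t atomic_add64(int64_t amt, volatile int64_t *p)
 *	{
 *		int64_t old, new;
 *		do {
 *			old = *p;
 *			new = old + amt;
 *		} while (!__sync_bool_compare_and_swap(p, old, new));
 *		return new;
 *	}
 */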

	.text

PLATFUNC_FUNCTION_START(OSAtomicAnd32, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicAnd32Barrier, up, 32, 2)
	ATOMIC_ARITHMETIC(andl, ATOMIC_RET_NEW, ATOMIC_UP)
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicAnd32, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicAnd32Barrier, mp, 32, 2)
	ATOMIC_ARITHMETIC(andl, ATOMIC_RET_NEW, ATOMIC_MP)
	ret

PLATFUNC_FUNCTION_START(OSAtomicOr32, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicOr32Barrier, up, 32, 2)
	ATOMIC_ARITHMETIC(orl, ATOMIC_RET_NEW, ATOMIC_UP)
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicOr32, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicOr32Barrier, mp, 32, 2)
	ATOMIC_ARITHMETIC(orl, ATOMIC_RET_NEW, ATOMIC_MP)
	ret

PLATFUNC_FUNCTION_START(OSAtomicXor32, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicXor32Barrier, up, 32, 2)
	ATOMIC_ARITHMETIC(xorl, ATOMIC_RET_NEW, ATOMIC_UP)
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicXor32, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicXor32Barrier, mp, 32, 2)
	ATOMIC_ARITHMETIC(xorl, ATOMIC_RET_NEW, ATOMIC_MP)
	ret

PLATFUNC_FUNCTION_START(OSAtomicAnd32Orig, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicAnd32OrigBarrier, up, 32, 2)
	ATOMIC_ARITHMETIC(andl, ATOMIC_RET_ORIG, ATOMIC_UP)
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicAnd32Orig, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicAnd32OrigBarrier, mp, 32, 2)
	ATOMIC_ARITHMETIC(andl, ATOMIC_RET_ORIG, ATOMIC_MP)
	ret

PLATFUNC_FUNCTION_START(OSAtomicOr32Orig, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicOr32OrigBarrier, up, 32, 2)
	ATOMIC_ARITHMETIC(orl, ATOMIC_RET_ORIG, ATOMIC_UP)
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicOr32Orig, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicOr32OrigBarrier, mp, 32, 2)
	ATOMIC_ARITHMETIC(orl, ATOMIC_RET_ORIG, ATOMIC_MP)
	ret

PLATFUNC_FUNCTION_START(OSAtomicXor32Orig, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicXor32OrigBarrier, up, 32, 2)
	ATOMIC_ARITHMETIC(xorl, ATOMIC_RET_ORIG, ATOMIC_UP)
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicXor32Orig, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicXor32OrigBarrier, mp, 32, 2)
	ATOMIC_ARITHMETIC(xorl, ATOMIC_RET_ORIG, ATOMIC_MP)
	ret

// bool OSAtomicCompareAndSwapInt(int oldValue, int newValue, volatile int *theValue);
// (also the Ptr, Long, and 32 variants, which are all identical on i386)
PLATFUNC_FUNCTION_START(OSAtomicCompareAndSwapPtr, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicCompareAndSwapPtrBarrier, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicCompareAndSwapInt, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicCompareAndSwapIntBarrier, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicCompareAndSwapLong, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicCompareAndSwapLongBarrier, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicCompareAndSwap32, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicCompareAndSwap32Barrier, up, 32, 2)
	movl	4(%esp), %eax	// oldValue
	movl	8(%esp), %edx	// newValue
	movl	12(%esp), %ecx	// ptr to theValue
	xchg32	%edx, %ecx, ATOMIC_UP
	sete	%al
	movzbl	%al,%eax	// widen in case caller assumes we return an int
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicCompareAndSwapPtr, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicCompareAndSwapPtrBarrier, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicCompareAndSwapInt, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicCompareAndSwapIntBarrier, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicCompareAndSwapLong, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicCompareAndSwapLongBarrier, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicCompareAndSwap32, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicCompareAndSwap32Barrier, mp, 32, 2)
	movl	4(%esp), %eax	// oldValue
	movl	8(%esp), %edx	// newValue
	movl	12(%esp), %ecx	// ptr to theValue
	xchg32	%edx, %ecx, ATOMIC_MP
	sete	%al
	movzbl	%al,%eax	// widen in case caller assumes we return an int
	ret
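
/*
 * Typical caller-side retry loop (illustrative C, not part of this file):
 *
 *	#include <libkern/OSAtomic.h>
 *
 *	static void set_flags(volatile int32_t *flags, int32_t bits)
 *	{
 *		int32_t old, new;
 *		do {
 *			old = *flags;
 *			new = old | bits;
 *		} while (!OSAtomicCompareAndSwap32(old, new, flags));
 *	}
 */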

// bool OSAtomicCompareAndSwap64(int64_t oldValue, int64_t newValue, volatile int64_t *theValue);
PLATFUNC_FUNCTION_START(OSAtomicCompareAndSwap64, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicCompareAndSwap64Barrier, up, 32, 2)
	pushl	%ebx		// save callee-saved registers we clobber
	pushl	%esi
	movl	12(%esp), %eax	// load in 1st 64-bit parameter
	movl	16(%esp), %edx
	movl	20(%esp), %ebx	// load in 2nd 64-bit parameter
	movl	24(%esp), %ecx
	movl	28(%esp), %esi	// load in destination address
	xchg64	%esi, ATOMIC_UP	// compare and swap 64-bit
	sete	%al
	movzbl	%al,%eax	// widen in case caller assumes we return an int
	popl	%esi
	popl	%ebx
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicCompareAndSwap64, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicCompareAndSwap64Barrier, mp, 32, 2)
	pushl	%ebx		// save callee-saved registers we clobber
	pushl	%esi
	movl	12(%esp), %eax	// load in 1st 64-bit parameter
	movl	16(%esp), %edx
	movl	20(%esp), %ebx	// load in 2nd 64-bit parameter
	movl	24(%esp), %ecx
	movl	28(%esp), %esi	// load in destination address
	xchg64	%esi, ATOMIC_MP	// compare and swap 64-bit
	sete	%al
	movzbl	%al,%eax	// widen in case caller assumes we return an int
	popl	%esi
	popl	%ebx
	ret
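
/*
 * Note the argument offsets: the two pushes move %esp down 8 bytes, so
 * the first argument sits at 12(%esp) rather than 4(%esp). Example caller
 * (illustrative C, not part of this file):
 *
 *	#include <libkern/OSAtomic.h>
 *
 *	bool try_publish(volatile int64_t *slot, int64_t expected, int64_t v)
 *	{
 *		return OSAtomicCompareAndSwap64Barrier(expected, v, slot);
 *	}
 */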

PLATFUNC_FUNCTION_START(OSAtomicAdd32, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicAdd32Barrier, up, 32, 2)
	movl	4(%esp), %eax	// theAmount
	movl	8(%esp), %edx	// ptr to theValue
	movl	%eax, %ecx	// save theAmount
	xaddl	%eax, (%edx)	// exchange and add; old value left in %eax
	addl	%ecx, %eax	// new value = old value + theAmount
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicAdd32, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicAdd32Barrier, mp, 32, 2)
	movl	4(%esp), %eax	// theAmount
	movl	8(%esp), %edx	// ptr to theValue
	movl	%eax, %ecx	// save theAmount
	lock
	xaddl	%eax, (%edx)	// exchange and add; old value left in %eax
	addl	%ecx, %eax	// new value = old value + theAmount
	ret
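
/*
 * xaddl exchanges and adds in a single instruction, so no retry loop is
 * needed here: the old value comes back in %eax and the final addl
 * recovers the new one. C sketch (illustrative only):
 *
 *	int32_t atomic_add32(int32_t amt, volatile int32_t *p)
 *	{
 *		// __sync_fetch_and_add compiles to (lock) xaddl on i386
 *		return __sync_fetch_and_add(p, amt) + amt;
 *	}
 */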

PLATFUNC_FUNCTION_START(OSAtomicAdd64, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicAdd64Barrier, up, 32, 2)
	ATOMIC_ADD64(ATOMIC_UP)
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicAdd64, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicAdd64Barrier, mp, 32, 2)
	ATOMIC_ADD64(ATOMIC_MP)
	ret

PLATFUNC_FUNCTION_START(OSAtomicTestAndSet, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicTestAndSetBarrier, up, 32, 2)
	ATOMIC_BIT_OP(btsl, ATOMIC_UP)
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicTestAndSet, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicTestAndSetBarrier, mp, 32, 2)
	ATOMIC_BIT_OP(btsl, ATOMIC_MP)
	ret

PLATFUNC_FUNCTION_START(OSAtomicTestAndClear, up, 32, 2)
PLATFUNC_FUNCTION_START(OSAtomicTestAndClearBarrier, up, 32, 2)
	ATOMIC_BIT_OP(btrl, ATOMIC_UP)
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicTestAndClear, mp, 32, 2)
PLATFUNC_FUNCTION_START_GENERIC(OSAtomicTestAndClearBarrier, mp, 32, 2)
	ATOMIC_BIT_OP(btrl, ATOMIC_MP)
	ret

// OSMemoryBarrier()
// These are used in both 32- and 64-bit mode. We use a fence even on UP
// machines, so this function can be used with nontemporal stores.

PLATFUNC_FUNCTION_START_GENERIC(OSMemoryBarrier, all, 32, 4)
	lock
	addl	$0,(%esp)
	ret
PLATFUNC_DESCRIPTOR(OSMemoryBarrier,all,0,kHasSSE2);

PLATFUNC_FUNCTION_START(OSMemoryBarrier, sse2, 32, 4)
	mfence
	ret
PLATFUNC_DESCRIPTOR(OSMemoryBarrier,sse2,kHasSSE2,0);
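
/*
 * "lock addl $0,(%esp)" is the classic pre-SSE2 full-barrier idiom: a
 * locked no-op read-modify-write of the top of the stack serializes
 * earlier loads and stores; SSE2-capable processors get mfence instead.
 * Typical use (illustrative C, not part of this file):
 *
 *	data = 42;		// plain store
 *	OSMemoryBarrier();	// order the store before the flag
 *	ready = 1;		// consumer sees data once ready is set
 */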

/*
 * typedef volatile struct {
 *	void	*opaque1;	<-- ptr to 1st queue element or null
 *	long	 opaque2;	<-- generation count
 * } OSQueueHead;
 *
 * void OSAtomicEnqueue( OSQueueHead *list, void *new, size_t offset);
 */
PLATFUNC_FUNCTION_START(OSAtomicEnqueue, up, 32, 2)
	pushl	%edi
	pushl	%esi
	pushl	%ebx
	movl	16(%esp),%edi	// %edi == ptr to list head
	movl	20(%esp),%ebx	// %ebx == new
	movl	24(%esp),%esi	// %esi == offset
	movl	(%edi),%eax	// %eax == ptr to 1st element in Q
	movl	4(%edi),%edx	// %edx == current generation count
1:	movl	%eax,(%ebx,%esi) // link to old list head from new element
	movl	%edx,%ecx
	incl	%ecx		// increment generation count
	xchg64	%edi, ATOMIC_UP	// ...push on new element
	jnz	1b
	popl	%ebx
	popl	%esi
	popl	%edi
	ret

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicEnqueue, mp, 32, 2)
	pushl	%edi
	pushl	%esi
	pushl	%ebx
	movl	16(%esp),%edi	// %edi == ptr to list head
	movl	20(%esp),%ebx	// %ebx == new
	movl	24(%esp),%esi	// %esi == offset
	movl	(%edi),%eax	// %eax == ptr to 1st element in Q
	movl	4(%edi),%edx	// %edx == current generation count
1:	movl	%eax,(%ebx,%esi) // link to old list head from new element
	movl	%edx,%ecx
	incl	%ecx		// increment generation count
	xchg64	%edi, ATOMIC_MP	// ...push on new element
	jnz	1b
	popl	%ebx
	popl	%esi
	popl	%edi
	ret
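
/*
 * The head pointer and generation count occupy adjacent words and are
 * replaced together by the 64-bit compare-and-swap; bumping the count on
 * every push is what defeats the ABA problem on this LIFO. C shape of the
 * operation (illustrative sketch; cas2 stands for a hypothetical 64-bit
 * CAS over the {opaque1, opaque2} pair):
 *
 *	void lifo_push(OSQueueHead *list, void *elem, size_t offset)
 *	{
 *		void *first; long gen;
 *		do {
 *			first = list->opaque1;
 *			gen   = list->opaque2;
 *			*(void **)((char *)elem + offset) = first;
 *		} while (!cas2(list, first, gen, elem, gen + 1));
 *	}
 */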

/* void* OSAtomicDequeue( OSQueueHead *list, size_t offset); */
PLATFUNC_FUNCTION_START(OSAtomicDequeue, up, 32, 2)
	pushl	%edi
	pushl	%esi
	pushl	%ebx
	movl	16(%esp),%edi	// %edi == ptr to list head
	movl	20(%esp),%esi	// %esi == offset
	movl	(%edi),%eax	// %eax == ptr to 1st element in Q
	movl	4(%edi),%edx	// %edx == current generation count
1:	testl	%eax,%eax	// list empty?
	jz	2f		// yes
	movl	(%eax,%esi),%ebx // point to 2nd in Q
	movl	%edx,%ecx
	incl	%ecx		// increment generation count
	xchg64	%edi, ATOMIC_UP	// ...pop off 1st element
	jnz	1b
2:	popl	%ebx
	popl	%esi
	popl	%edi
	ret			// ptr to 1st element in Q still in %eax

PLATFUNC_FUNCTION_START_GENERIC(OSAtomicDequeue, mp, 32, 2)
	pushl	%edi
	pushl	%esi
	pushl	%ebx
	movl	16(%esp),%edi	// %edi == ptr to list head
	movl	20(%esp),%esi	// %esi == offset
	movl	(%edi),%eax	// %eax == ptr to 1st element in Q
	movl	4(%edi),%edx	// %edx == current generation count
1:	testl	%eax,%eax	// list empty?
	jz	2f		// yes
	movl	(%eax,%esi),%ebx // point to 2nd in Q
	movl	%edx,%ecx
	incl	%ecx		// increment generation count
	xchg64	%edi, ATOMIC_MP	// ...pop off 1st element
	jnz	1b
2:	popl	%ebx
	popl	%esi
	popl	%edi
	ret			// ptr to 1st element in Q still in %eax
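
/*
 * Example caller for the LIFO pair (illustrative C, not part of this
 * file):
 *
 *	#include <libkern/OSAtomic.h>
 *	#include <stddef.h>
 *
 *	struct node { struct node *link; int payload; };
 *	static OSQueueHead head = OS_ATOMIC_QUEUE_INIT;
 *
 *	void push(struct node *n) {
 *		OSAtomicEnqueue(&head, n, offsetof(struct node, link));
 *	}
 *	struct node *pop(void) {
 *		return OSAtomicDequeue(&head, offsetof(struct node, link));
 *	}
 */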

/*
 * typedef volatile struct {
 *	void	*opaque1;	<-- ptr to first queue element or null
 *	void	*opaque2;	<-- ptr to last queue element or null
 *	int	 opaque3;	<-- spinlock
 * } OSFifoQueueHead;
 *
 * void OSAtomicFifoEnqueue( OSFifoQueueHead *list, void *new, size_t offset);
 */
	.align	2
	.globl	_OSAtomicFifoEnqueue
_OSAtomicFifoEnqueue:
	pushl	%edi
	pushl	%esi
	pushl	%ebx
	xorl	%ebx,%ebx	// clear "preemption pending" flag
	movl	16(%esp),%edi	// %edi == ptr to list head
	movl	20(%esp),%esi	// %esi == new
	EXTERN_TO_REG(_commpage_pfz_base,%ecx)
	movl	(%ecx), %ecx
	addl	$(_COMM_TEXT_PFZ_ENQUEUE_OFFSET), %ecx
	movl	24(%esp),%edx	// %edx == offset
	call	*%ecx
	testl	%ebx,%ebx	// pending preemption?
	jz	1f
	call	_preempt
1:	popl	%ebx
	popl	%esi
	popl	%edi
	ret
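
/*
 * The FIFO operations jump into the commpage's preemption-free zone
 * (PFZ): the enqueue and dequeue bodies live at fixed offsets from the
 * PFZ base, and the kernel rolls a thread forward rather than preempting
 * it inside that region. On return, a nonzero %ebx means a preemption was
 * deferred and _preempt must be called to take it.
 */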

/* void* OSAtomicFifoDequeue( OSFifoQueueHead *list, size_t offset); */
	.align	2
	.globl	_OSAtomicFifoDequeue
_OSAtomicFifoDequeue:
	pushl	%edi
	pushl	%esi
	pushl	%ebx
	xorl	%ebx,%ebx	// clear "preemption pending" flag
	movl	16(%esp),%edi	// %edi == ptr to list head
	PICIFY(_commpage_pfz_base)	// leaves address in %edx
	movl	(%edx),%ecx
	movl	20(%esp),%edx	// %edx == offset
	addl	$(_COMM_TEXT_PFZ_DEQUEUE_OFFSET), %ecx
	call	*%ecx
	testl	%ebx,%ebx	// pending preemption?
	jz	1f
	pushl	%eax		// save return value across sysenter
	call	_preempt
	popl	%eax
1:	popl	%ebx
	popl	%esi
	popl	%edi
	ret			// ptr to 1st element in Q still in %eax
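
/*
 * Example caller for the FIFO pair (illustrative C, not part of this
 * file):
 *
 *	#include <libkern/OSAtomic.h>
 *	#include <stddef.h>
 *
 *	struct job { struct job *link; int id; };
 *	static OSFifoQueueHead jobs = OS_ATOMIC_FIFO_QUEUE_INIT;
 *
 *	void submit(struct job *j) {
 *		OSAtomicFifoEnqueue(&jobs, j, offsetof(struct job, link));
 *	}
 *	struct job *take(void) {
 *		return OSAtomicFifoDequeue(&jobs, offsetof(struct job, link));
 *	}
 */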

// Local Variables:
// tab-width: 8
// End: