X-Git-Url: https://git.saurik.com/apple/xnu.git/blobdiff_plain/2d21ac55c334faf3a56e5634905ed6987fc787d4..04b8595b18b1b41ac7a206e4b3d51a635f8413d7:/libkern/gen/OSAtomicOperations.c?ds=inline

diff --git a/libkern/gen/OSAtomicOperations.c b/libkern/gen/OSAtomicOperations.c
index bbdb0d970..25ff477ff 100644
--- a/libkern/gen/OSAtomicOperations.c
+++ b/libkern/gen/OSAtomicOperations.c
@@ -54,54 +54,24 @@ enum {
  * Like standards, there are a lot of atomic ops to choose from!
  */
 
-#ifndef __ppc__
-
+#if defined(__i386__) || defined(__x86_64__)
+/* Implemented in assembly for i386 and x86_64 */
+#else
+#error Unsupported arch
+#endif
+
+#undef OSIncrementAtomic
 SInt32	OSIncrementAtomic(volatile SInt32 * value)
 {
 	return OSAddAtomic(1, value);
 }
 
+#undef OSDecrementAtomic
 SInt32	OSDecrementAtomic(volatile SInt32 * value)
 {
 	return OSAddAtomic(-1, value);
 }
 
-#ifdef CMPXCHG8B
-void * OSDequeueAtomic(void * volatile * inList, SInt32 inOffset)
-{
-	/* The _pointer_ is volatile, not the listhead itself */
-	void *	volatile oldListHead;
-	void *	volatile newListHead;
-
-	do {
-		oldListHead = *inList;
-		if (oldListHead == NULL) {
-			break;
-		}
-
-		newListHead = *(void * volatile *) (((char *) oldListHead) + inOffset);
-	} while (! OSCompareAndSwap((UInt32)oldListHead,
-					(UInt32)newListHead, (volatile UInt32 *)inList));
-
-	return oldListHead;
-}
-
-void	OSEnqueueAtomic(void * volatile * inList, void * inNewLink, SInt32 inOffset)
-{
-	/* The _pointer_ is volatile, not the listhead itself */
-	void *	volatile oldListHead;
-	void *	volatile newListHead = inNewLink;
-	void * volatile *	newLinkNextPtr = (void * volatile *) (((char *) inNewLink) + inOffset);
-
-	do {
-		oldListHead = *inList;
-		*newLinkNextPtr = oldListHead;
-	} while (! OSCompareAndSwap((UInt32)oldListHead, (UInt32)newListHead,
-					(volatile UInt32 *)inList));
-}
-#endif /* CMPXCHG8B */
-#endif /* !__ppc__ */
-
 static UInt32	OSBitwiseAtomic(UInt32 and_mask, UInt32 or_mask, UInt32 xor_mask, volatile UInt32 * value)
 {
 	UInt32	oldValue;
@@ -115,28 +85,32 @@ static UInt32 OSBitwiseAtomic(UInt32 and_mask, UInt32 or_mask, UInt32 xor_mask,
 	return oldValue;
 }
 
+#undef OSBitAndAtomic
 UInt32	OSBitAndAtomic(UInt32 mask, volatile UInt32 * value)
 {
 	return OSBitwiseAtomic(mask, 0, 0, value);
 }
 
+#undef OSBitOrAtomic
 UInt32	OSBitOrAtomic(UInt32 mask, volatile UInt32 * value)
 {
 	return OSBitwiseAtomic((UInt32) -1, mask, 0, value);
 }
 
+#undef OSBitXorAtomic
 UInt32	OSBitXorAtomic(UInt32 mask, volatile UInt32 * value)
 {
 	return OSBitwiseAtomic((UInt32) -1, 0, mask, value);
 }
 
+#if defined(__i386__) || defined(__x86_64__)
 static Boolean	OSCompareAndSwap8(UInt8 oldValue8, UInt8 newValue8, volatile UInt8 * value8)
 {
 	UInt32	mask		= 0x000000ff;
-	UInt32	alignment	= ((UInt32) value8) & (sizeof(UInt32) - 1);
+	UInt32	alignment	= (UInt32)((unsigned long) value8) & (sizeof(UInt32) - 1);
 	UInt32	shiftValues	= (24 << 24) | (16 << 16) | (8 << 8);
 	int	shift		= (UInt32) *(((UInt8 *) &shiftValues) + alignment);
-	volatile UInt32 *	value32	= (volatile UInt32 *) (value8 - alignment);
+	volatile UInt32 *	value32	= (volatile UInt32 *) ((uintptr_t)value8 - alignment);
 	UInt32	oldValue;
 	UInt32	newValue;
@@ -148,6 +122,7 @@ static Boolean OSCompareAndSwap8(UInt8 oldValue8, UInt8 newValue8, volatile UInt
 
 	return OSCompareAndSwap(oldValue, newValue, value32);
 }
+#endif
 
 static Boolean	OSTestAndSetClear(UInt32 bit, Boolean wantSet, volatile UInt8 * startAddress)
 {
@@ -193,6 +168,7 @@ SInt8 OSDecrementAtomic8(volatile SInt8 * value)
 	return OSAddAtomic8(-1, value);
 }
 
+#if defined(__i386__) || defined(__x86_64__)
 SInt8	OSAddAtomic8(SInt32 amount, volatile SInt8 * value)
 {
 	SInt8	oldValue;
@@ -205,6 +181,7 @@ SInt8 OSAddAtomic8(SInt32 amount, volatile SInt8 * value)
 	return oldValue;
 }
+#endif
 
 static UInt8	OSBitwiseAtomic8(UInt32 and_mask, UInt32 or_mask, UInt32 xor_mask, volatile UInt8 * value)
 {
@@ -234,13 +211,14 @@ UInt8 OSBitXorAtomic8(UInt32 mask, volatile UInt8 * value)
 	return OSBitwiseAtomic8((UInt32) -1, 0, mask, value);
 }
 
+#if defined(__i386__) || defined(__x86_64__)
 static Boolean	OSCompareAndSwap16(UInt16 oldValue16, UInt16 newValue16, volatile UInt16 * value16)
 {
 	UInt32	mask		= 0x0000ffff;
-	UInt32	alignment	= ((UInt32) value16) & (sizeof(UInt32) - 1);
+	UInt32	alignment	= (UInt32)((unsigned long) value16) & (sizeof(UInt32) - 1);
 	UInt32	shiftValues	= (16 << 24) | (16 << 16);
 	UInt32	shift		= (UInt32) *(((UInt8 *) &shiftValues) + alignment);
-	volatile UInt32 *	value32	= (volatile UInt32 *) (((UInt32) value16) - alignment);
+	volatile UInt32 *	value32	= (volatile UInt32 *) (((unsigned long) value16) - alignment);
 	UInt32	oldValue;
 	UInt32	newValue;
@@ -252,6 +230,7 @@ static Boolean OSCompareAndSwap16(UInt16 oldValue16, UInt16 newValue16, volatile
 	return OSCompareAndSwap(oldValue, newValue, value32);
 }
+#endif
 
 SInt16	OSIncrementAtomic16(volatile SInt16 * value)
 {
@@ -263,6 +242,7 @@ SInt16 OSDecrementAtomic16(volatile SInt16 * value)
 	return OSAddAtomic16(-1, value);
 }
 
+#if defined(__i386__) || defined(__x86_64__)
 SInt16	OSAddAtomic16(SInt32 amount, volatile SInt16 * value)
 {
 	SInt16	oldValue;
@@ -275,6 +255,7 @@ SInt16 OSAddAtomic16(SInt32 amount, volatile SInt16 * value)
 	return oldValue;
 }
+#endif
 
 static UInt16	OSBitwiseAtomic16(UInt32 and_mask, UInt32 or_mask, UInt32 xor_mask, volatile UInt16 * value)
 {
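
Note on the OSBit*Atomic wrappers: OSBitAndAtomic, OSBitOrAtomic, and OSBitXorAtomic all delegate to the static OSBitwiseAtomic, whose loop body falls outside the hunk context above; only its signature and "return oldValue;" are visible. Assuming it follows the standard compare-and-swap retry loop that the wrappers' (and_mask, or_mask, xor_mask) argument patterns imply, a hypothetical C11 rendering of the pattern looks like this; the name bitwise_atomic and the exact update expression are reconstructions, not quoted from the diff.

#include <stdatomic.h>
#include <stdint.h>

/* Sketch of a CAS retry loop for combined and/or/xor updates, in the shape
 * suggested by the diff's OSBitwiseAtomic wrappers. Reconstruction, not the
 * kernel source. */
static uint32_t
bitwise_atomic(uint32_t and_mask, uint32_t or_mask, uint32_t xor_mask,
    _Atomic uint32_t *value)
{
    uint32_t old_val = atomic_load(value);
    uint32_t new_val;

    do {
        new_val = ((old_val & and_mask) | or_mask) ^ xor_mask;
        /* On failure, old_val is refreshed with the word's current
         * contents and the update is recomputed against it. */
    } while (!atomic_compare_exchange_weak(value, &old_val, new_val));

    return old_val; /* pre-update value, matching the visible "return oldValue;" */
}

/* The exported wrappers in the diff then reduce to:
 *   OSBitAndAtomic(mask, v)  ->  bitwise_atomic(mask, 0, 0, v)
 *   OSBitOrAtomic(mask, v)   ->  bitwise_atomic(~0u, mask, 0, v)
 *   OSBitXorAtomic(mask, v)  ->  bitwise_atomic(~0u, 0, mask, v)
 */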
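Note on the sub-word compare-and-swap helpers: OSCompareAndSwap8 and OSCompareAndSwap16 are now fenced to i386/x86_64, and their pointer arithmetic is widened through unsigned long/uintptr_t so the alignment math stays correct with 64-bit pointers. Both synthesize a narrow CAS from the kernel's 32-bit OSCompareAndSwap: round the address down to its containing 32-bit word, splice the expected and desired sub-word values into full-word images, and CAS the whole word. The sketch below is a minimal standalone illustration of that splice-and-CAS step using C11 atomics rather than the kernel primitive; it takes the word and a byte index directly instead of deriving them from a byte pointer (the kernel code derives the shift from the address via the packed shiftValues table), it assumes a little-endian target as on i386/x86_64, and the name cas8_via_cas32 is made up for the example.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustration only: a byte-wide CAS built from a 32-bit CAS, mirroring the
 * technique in the diff's OSCompareAndSwap8. Little-endian layout assumed. */
static bool
cas8_via_cas32(uint8_t expected, uint8_t desired, _Atomic uint32_t *word,
    unsigned byte_index)
{
    unsigned shift = byte_index * 8;          /* byte 0..3 -> bit 0..24 */
    uint32_t mask  = (uint32_t)0xff << shift;

    /* Build full-word expected/desired images: keep the three neighbouring
     * bytes as currently observed, splice our byte into the fourth slot. */
    uint32_t old32 = atomic_load(word);
    old32 = (old32 & ~mask) | ((uint32_t)expected << shift);
    uint32_t new32 = (old32 & ~mask) | ((uint32_t)desired << shift);

    /* Single attempt, like the kernel routine: a concurrent store to any
     * byte of the word makes the CAS fail, and the caller may retry. */
    return atomic_compare_exchange_strong(word, &old32, new32);
}

int
main(void)
{
    _Atomic uint32_t word = 0x11223344;

    /* On little-endian, byte 1 of 0x11223344 holds 0x33. */
    bool ok = cas8_via_cas32(0x33, 0xaa, &word, 1);
    printf("ok=%d word=0x%08x\n", ok, (unsigned)atomic_load(&word));
    /* Prints: ok=1 word=0x1122aa44 */
    return 0;
}

The failure mode this illustrates is also why the helpers are per-architecture: a CAS on the containing word can fail because a neighbouring byte changed, even when the target byte still holds the expected value, so callers must treat a false return as "retry", not "mismatch".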