* Like standards, there are a lot of atomic ops to choose from!
*/
-#ifndef __ppc__
-
+#if defined(__i386__) || defined(__x86_64__)
+/* Implemented in assembly for i386 and x86_64 */
+#else
+#error Unsupported arch
+#endif
+#undef OSIncrementAtomic
/* Atomically bump *value by one; returns the value seen before the add. */
SInt32 OSIncrementAtomic(volatile SInt32 * value)
{
	SInt32 previous = OSAddAtomic(1, value);
	return previous;
}
+#undef OSDecrementAtomic
/* Atomically drop *value by one; returns the value seen before the subtract. */
SInt32 OSDecrementAtomic(volatile SInt32 * value)
{
	SInt32 previous = OSAddAtomic(-1, value);
	return previous;
}
/*
 * NOTE(review): this file is a unified-diff fragment; every line below is
 * prefixed with '-', meaning the whole CMPXCHG8B section is REMOVED by the
 * patch.  The removed lock-free queue primitives cast list pointers to
 * UInt32 for OSCompareAndSwap, which truncates pointers on LP64 targets —
 * presumably the reason for their removal; TODO confirm against the full
 * change description.
 */
-#ifdef CMPXCHG8B
/* (removed) Pop the head of a singly linked list via CAS retry loop;
 * inOffset is the byte offset of the "next" link within each node. */
-void * OSDequeueAtomic(void * volatile * inList, SInt32 inOffset)
-{
- /* The _pointer_ is volatile, not the listhead itself */
- void * volatile oldListHead;
- void * volatile newListHead;
-
- do {
- oldListHead = *inList;
- if (oldListHead == NULL) {
- break;
- }
-
- newListHead = *(void * volatile *) (((char *) oldListHead) + inOffset);
- } while (! OSCompareAndSwap((UInt32)oldListHead,
- (UInt32)newListHead, (volatile UInt32 *)inList));
- return oldListHead;
-}
-
/* (removed) Push inNewLink onto the list head via CAS retry loop;
 * the new node's link field is stored at inNewLink + inOffset. */
-void OSEnqueueAtomic(void * volatile * inList, void * inNewLink, SInt32 inOffset)
-{
- /* The _pointer_ is volatile, not the listhead itself */
- void * volatile oldListHead;
- void * volatile newListHead = inNewLink;
- void * volatile * newLinkNextPtr = (void * volatile *) (((char *) inNewLink) + inOffset);
-
- do {
- oldListHead = *inList;
- *newLinkNextPtr = oldListHead;
- } while (! OSCompareAndSwap((UInt32)oldListHead, (UInt32)newListHead,
- (volatile UInt32 *)inList));
-}
-#endif /* CMPXCHG8B */
-#endif /* !__ppc__ */
-
/*
 * Generic atomic read-modify-write helper: applies AND/OR/XOR masks to
 * *value and returns the previous value.
 * NOTE(review): this file is a diff fragment — the retry loop that loads
 * *value, computes newValue from the masks, and CASes it in appears to be
 * elided unchanged-context here.  As literally shown, oldValue would be
 * returned uninitialized; do not read this as the real body.
 */
static UInt32 OSBitwiseAtomic(UInt32 and_mask, UInt32 or_mask, UInt32 xor_mask, volatile UInt32 * value)
{
UInt32 oldValue;
return oldValue;
}
+#undef OSBitAndAtomic
/* Atomically AND mask into *value; returns the prior contents. */
UInt32 OSBitAndAtomic(UInt32 mask, volatile UInt32 * value)
{
	UInt32 previous = OSBitwiseAtomic(mask, 0, 0, value);
	return previous;
}
+#undef OSBitOrAtomic
/* Atomically OR mask into *value; returns the prior contents. */
UInt32 OSBitOrAtomic(UInt32 mask, volatile UInt32 * value)
{
	UInt32 previous = OSBitwiseAtomic((UInt32) -1, mask, 0, value);
	return previous;
}
+#undef OSBitXorAtomic
/* Atomically XOR mask into *value; returns the prior contents. */
UInt32 OSBitXorAtomic(UInt32 mask, volatile UInt32 * value)
{
	UInt32 previous = OSBitwiseAtomic((UInt32) -1, 0, mask, value);
	return previous;
}
+#if defined(__i386__) || defined(__x86_64__)
/*
 * Byte-wide compare-and-swap emulated with a 32-bit OSCompareAndSwap on
 * the enclosing aligned word; shiftValues is a packed per-alignment
 * lookup table giving the bit shift of the target byte within that word.
 * NOTE(review): diff fragment — the '-' lines are the old casts that
 * truncate pointers to UInt32 on LP64, the '+' lines the widened
 * replacements (the '+' variant needs <stdint.h> uintptr_t in scope —
 * verify the include exists).  The statements that assemble oldValue and
 * newValue from mask/shift appear to be elided context, so this block is
 * incomplete as shown.
 */
static Boolean OSCompareAndSwap8(UInt8 oldValue8, UInt8 newValue8, volatile UInt8 * value8)
{
UInt32 mask = 0x000000ff;
- UInt32 alignment = ((UInt32) value8) & (sizeof(UInt32) - 1);
+ UInt32 alignment = (UInt32)((unsigned long) value8) & (sizeof(UInt32) - 1);
UInt32 shiftValues = (24 << 24) | (16 << 16) | (8 << 8);
int shift = (UInt32) *(((UInt8 *) &shiftValues) + alignment);
- volatile UInt32 * value32 = (volatile UInt32 *) (value8 - alignment);
+ volatile UInt32 * value32 = (volatile UInt32 *) ((uintptr_t)value8 - alignment);
UInt32 oldValue;
UInt32 newValue;
return OSCompareAndSwap(oldValue, newValue, value32);
}
+#endif
/*
 * NOTE(review): diff fragment with elided context — the signature below
 * belongs to OSTestAndSetClear (bit test-and-set/clear helper), but the
 * visible return statement (OSAddAtomic8(-1, value)) comes from a
 * different, elided function — presumably OSDecrementAtomic8; the lines
 * between were omitted as unchanged diff context.  Do not read this as a
 * real function body, and do not "fix" it without the full file.
 */
static Boolean OSTestAndSetClear(UInt32 bit, Boolean wantSet, volatile UInt8 * startAddress)
{
return OSAddAtomic8(-1, value);
}
+#if defined(__i386__) || defined(__x86_64__)
SInt8 OSAddAtomic8(SInt32 amount, volatile SInt8 * value)
{
SInt8 oldValue;
return oldValue;
}
+#endif
/*
 * NOTE(review): diff fragment with elided context — this pairs the
 * OSBitwiseAtomic8 signature with a return statement that actually
 * belongs to an elided XOR wrapper (it passes and_mask = ~0, or_mask = 0,
 * xor_mask = mask, matching OSBitXorAtomic above).  As shown it would be
 * infinite recursion; the real body was omitted as unchanged context.
 */
static UInt8 OSBitwiseAtomic8(UInt32 and_mask, UInt32 or_mask, UInt32 xor_mask, volatile UInt8 * value)
{
return OSBitwiseAtomic8((UInt32) -1, 0, mask, value);
}
+#if defined(__i386__) || defined(__x86_64__)
/*
 * 16-bit compare-and-swap emulated with a 32-bit OSCompareAndSwap on the
 * enclosing aligned word; shiftValues packs the per-alignment bit shift
 * of the halfword within that word.
 * NOTE(review): diff fragment — '-' lines are the old pointer-to-UInt32
 * casts (truncating on LP64), '+' lines the widened replacements.  The
 * statements that build oldValue/newValue from mask/shift appear to be
 * elided context, so this block is incomplete as shown.
 */
static Boolean OSCompareAndSwap16(UInt16 oldValue16, UInt16 newValue16, volatile UInt16 * value16)
{
UInt32 mask = 0x0000ffff;
- UInt32 alignment = ((UInt32) value16) & (sizeof(UInt32) - 1);
+ UInt32 alignment = (UInt32)((unsigned long) value16) & (sizeof(UInt32) - 1);
UInt32 shiftValues = (16 << 24) | (16 << 16);
UInt32 shift = (UInt32) *(((UInt8 *) &shiftValues) + alignment);
- volatile UInt32 * value32 = (volatile UInt32 *) (((UInt32) value16) - alignment);
+ volatile UInt32 * value32 = (volatile UInt32 *) (((unsigned long) value16) - alignment);
UInt32 oldValue;
UInt32 newValue;
return OSCompareAndSwap(oldValue, newValue, value32);
}
+#endif
/*
 * NOTE(review): diff fragment with elided context — the signature is
 * OSIncrementAtomic16 but the visible return passes -1, matching the
 * body of the (elided) OSDecrementAtomic16 wrapper; the intervening
 * lines were omitted as unchanged diff context.  Do not "fix" the sign
 * without the full file.
 */
SInt16 OSIncrementAtomic16(volatile SInt16 * value)
{
return OSAddAtomic16(-1, value);
}
+#if defined(__i386__) || defined(__x86_64__)
/*
 * 16-bit atomic add; contract (per the sibling ops) is to return the
 * value held before the addition.
 * NOTE(review): diff fragment — the load/compute/CAS retry loop appears
 * to be elided unchanged-context; as literally shown, oldValue would be
 * returned uninitialized.
 */
SInt16 OSAddAtomic16(SInt32 amount, volatile SInt16 * value)
{
SInt16 oldValue;
return oldValue;
}
+#endif
static UInt16 OSBitwiseAtomic16(UInt32 and_mask, UInt32 or_mask, UInt32 xor_mask, volatile UInt16 * value)
{