/*
- * Copyright (c) 2000 Apple Computer, Inc. All rights reserved.
+ * Copyright (c) 2000-2004 Apple Computer, Inc. All rights reserved.
*
- * @APPLE_LICENSE_HEADER_START@
- *
- * Copyright (c) 1999-2003 Apple Computer, Inc. All Rights Reserved.
+ * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
*
* This file contains Original Code and/or Modifications of Original Code
* as defined in and that are subject to the Apple Public Source License
* Version 2.0 (the 'License'). You may not use this file except in
- * compliance with the License. Please obtain a copy of the License at
- * http://www.opensource.apple.com/apsl/ and read it before using this
- * file.
+ * compliance with the License. The rights granted to you under the License
+ * may not be used to create, or enable the creation or redistribution of,
+ * unlawful or unlicensed copies of an Apple operating system, or to
+ * circumvent, violate, or enable the circumvention or violation of, any
+ * terms of an Apple operating system software license agreement.
+ *
+ * Please obtain a copy of the License at
+ * http://www.opensource.apple.com/apsl/ and read it before using this file.
*
* The Original Code and all software distributed under the License are
* distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
* EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR
* NON-INFRINGEMENT. Please see the License for the specific language
* governing rights and limitations under the License.
*
- * @APPLE_LICENSE_HEADER_END@
+ * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
*/
/*
 * Copyright (C) 1998 Apple Computer
 */
/*
* Machine-dependent simple locks for the i386.
*/
+#ifdef KERNEL_PRIVATE
#ifndef _I386_LOCK_H_
#define _I386_LOCK_H_
#include <kern/macro_help.h>
#include <kern/assert.h>
#include <i386/hw_lock_types.h>
+#include <i386/locks.h>
#include <mach_rt.h>
#include <mach_ldebug.h>
-#include <cpus.h>
+
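+/* The legacy lock_t type is now an alias for the lck_rw_t read/write lock from <i386/locks.h>. */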
+typedef lck_rw_t lock_t;
+
+extern unsigned int LockTimeOutTSC; /* Lock timeout in TSC ticks */
+extern unsigned int LockTimeOut; /* Lock timeout in absolute time */
#if defined(__GNUC__)
#define	bit_unlock(bit, l)						\
	__asm__ volatile("lock; btrl %0, %1"	:			\
		:							\
		"r" (bit), "m" (*(volatile int *)(l)));
-extern __inline__ unsigned long i_bit_isset(unsigned int testbit, volatile unsigned long *word)
-{
-	int bit;
-
-	__asm__ volatile("btl %2,%1\n\tsbbl %0,%0" : "=r" (bit)
-		: "m" (word), "ir" (testbit));
-	return bit;
-}
-
-extern __inline__ char xchgb(volatile char * cp, char new);
+static inline char xchgb(volatile char * cp, char new);
-extern __inline__ void atomic_incl(long * p, long delta);
-extern __inline__ void atomic_incs(short * p, short delta);
-extern __inline__ void atomic_incb(char * p, char delta);
+static inline void atomic_incl(volatile long * p, long delta);
+static inline void atomic_incs(volatile short * p, short delta);
+static inline void atomic_incb(volatile char * p, char delta);
-extern __inline__ void atomic_decl(long * p, long delta);
-extern __inline__ void atomic_decs(short * p, short delta);
-extern __inline__ void atomic_decb(char * p, char delta);
+static inline void atomic_decl(volatile long * p, long delta);
+static inline void atomic_decs(volatile short * p, short delta);
+static inline void atomic_decb(volatile char * p, char delta);
-extern __inline__ long atomic_getl(long * p);
-extern __inline__ short atomic_gets(short * p);
-extern __inline__ char atomic_getb(char * p);
+static inline long atomic_getl(const volatile long * p);
+static inline short atomic_gets(const volatile short * p);
+static inline char atomic_getb(const volatile char * p);
-extern __inline__ void atomic_setl(long * p, long value);
-extern __inline__ void atomic_sets(short * p, short value);
-extern __inline__ void atomic_setb(char * p, char value);
+static inline void atomic_setl(volatile long * p, long value);
+static inline void atomic_sets(volatile short * p, short value);
+static inline void atomic_setb(volatile char * p, char value);
-
-extern __inline__ char xchgb(volatile char * cp, char new)
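+/* Atomically exchange the byte at *cp with 'new' and return the previous value. */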
+static inline char xchgb(volatile char * cp, char new)
{
	register char	old = new;

	__asm__ volatile ("xchgb %0,%2"	:
			"=q" (old)	:
			"0" (new), "m" (*(volatile char *)cp));
	return (old);
}
-extern __inline__ void atomic_incl(long * p, long delta)
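+/*
+ * Locked add/subtract primitives: apply 'delta' to *p atomically with
+ * respect to other processors (lock-prefixed add/sub).
+ */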
+static inline void atomic_incl(volatile long * p, long delta)
{
-#if NEED_ATOMIC
__asm__ volatile (" lock \n \
- addl %0,%1" : \
+ add %0,%1" : \
: \
"r" (delta), "m" (*(volatile long *)p));
-#else /* NEED_ATOMIC */
- *p += delta;
-#endif /* NEED_ATOMIC */
}
-extern __inline__ void atomic_incs(short * p, short delta)
+static inline void atomic_incs(volatile short * p, short delta)
{
-#if NEED_ATOMIC
__asm__ volatile (" lock \n \
addw %0,%1" : \
: \
"q" (delta), "m" (*(volatile short *)p));
-#else /* NEED_ATOMIC */
- *p += delta;
-#endif /* NEED_ATOMIC */
}
-extern __inline__ void atomic_incb(char * p, char delta)
+static inline void atomic_incb(volatile char * p, char delta)
{
-#if NEED_ATOMIC
__asm__ volatile (" lock \n \
addb %0,%1" : \
: \
"q" (delta), "m" (*(volatile char *)p));
-#else /* NEED_ATOMIC */
- *p += delta;
-#endif /* NEED_ATOMIC */
}
-extern __inline__ void atomic_decl(long * p, long delta)
+static inline void atomic_decl(volatile long * p, long delta)
{
-#if NCPUS > 1
__asm__ volatile (" lock \n \
- subl %0,%1" : \
+ sub %0,%1" : \
: \
"r" (delta), "m" (*(volatile long *)p));
-#else /* NCPUS > 1 */
- *p -= delta;
-#endif /* NCPUS > 1 */
}
-extern __inline__ void atomic_decs(short * p, short delta)
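+/*
+ * Atomically subtract 'delta' from *p and return non-zero if and only if
+ * the result is zero (tests the ZF left by the locked sub).
+ */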
+static inline int atomic_decl_and_test(volatile long * p, long delta)
+{
+ uint8_t ret;
+ __asm__ volatile (
+ " lock \n\t"
+ " sub %1,%2 \n\t"
+ " sete %0"
+ : "=qm" (ret)
+ : "r" (delta), "m" (*(volatile long *)p));
+ return ret;
+}
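+/*
+ * Usage sketch (hypothetical names): drop a reference and free the
+ * object only when the last reference is released:
+ *
+ *	if (atomic_decl_and_test(&obj->ref_count, 1))
+ *		object_free(obj);
+ */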
+
+static inline void atomic_decs(volatile short * p, short delta)
{
-#if NEED_ATOMIC
__asm__ volatile (" lock \n \
subw %0,%1" : \
: \
"q" (delta), "m" (*(volatile short *)p));
-#else /* NEED_ATOMIC */
- *p -= delta;
-#endif /* NEED_ATOMIC */
}
-extern __inline__ void atomic_decb(char * p, char delta)
+static inline void atomic_decb(volatile char * p, char delta)
{
-#if NEED_ATOMIC
__asm__ volatile (" lock \n \
subb %0,%1" : \
: \
"q" (delta), "m" (*(volatile char *)p));
-#else /* NEED_ATOMIC */
- *p -= delta;
-#endif /* NEED_ATOMIC */
}
-extern __inline__ long atomic_getl(long * p)
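+/*
+ * Plain load/store accessors: naturally aligned loads and stores of these
+ * sizes are already atomic on IA-32, so no lock prefix is required.
+ */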
+static inline long atomic_getl(const volatile long * p)
{
return (*p);
}
-extern __inline__ short atomic_gets(short * p)
+static inline short atomic_gets(const volatile short * p)
{
return (*p);
}
-extern __inline__ char atomic_getb(char * p)
+static inline char atomic_getb(const volatile char * p)
{
return (*p);
}
-extern __inline__ void atomic_setl(long * p, long value)
+static inline void atomic_setl(volatile long * p, long value)
{
*p = value;
}
-extern __inline__ void atomic_sets(short * p, short value)
+static inline void atomic_sets(volatile short * p, short value)
{
*p = value;
}
-extern __inline__ void atomic_setb(char * p, char value)
+static inline void atomic_setb(volatile char * p, char value)
{
*p = value;
}
#endif /* defined(__GNUC__) */
-
-#if !(USLOCK_DEBUG || USLOCK_STATS)
-/*
- * Take responsibility for production-quality usimple_locks.
- * Let the portable lock package build simple_locks in terms
- * of usimple_locks, which is done efficiently with macros.
- * Currently, these aren't inlined although they probably
- * should be. The portable lock package is used for the
- * usimple_lock prototypes and data declarations.
- *
- * For non-production configurations, punt entirely to the
- * portable lock package.
- *
- * N.B. I've left in the hooks for ETAP, so we can
- * compare the performance of stats-gathering on top
- * of "production" locks v. stats-gathering on top
- * of portable, C-based locks.
- */
-#define USIMPLE_LOCK_CALLS
-#endif /* !(USLOCK_DEBUG || USLOCK_STATS) */
-
extern void kernel_preempt_check (void);
#endif /* MACH_KERNEL_PRIVATE */
#endif /* __APPLE_API_PRIVATE */
#endif /* _I386_LOCK_H_ */
+
+#endif /* KERNEL_PRIVATE */