extern unsigned int hw_lock_to(
hw_lock_t,
- unsigned int);
+ uint64_t);
extern unsigned int hw_lock_try(
hw_lock_t);
__BEGIN_DECLS
extern uint32_t hw_atomic_add(
- uint32_t *dest,
+ volatile uint32_t *dest,
uint32_t delt);
extern uint32_t hw_atomic_sub(
- uint32_t *dest,
+ volatile uint32_t *dest,
uint32_t delt);
extern uint32_t hw_atomic_or(
- uint32_t *dest,
+ volatile uint32_t *dest,
uint32_t mask);
extern uint32_t hw_atomic_and(
- uint32_t *dest,
+ volatile uint32_t *dest,
+ uint32_t mask);
+
+/*
+ * Variant of hw_atomic_or which doesn't return a value; potentially
+ * more efficient on some platforms.
+ */
+extern void hw_atomic_or_noret(
+ volatile uint32_t *dest,
+ uint32_t mask);
+
+/*
+ * Variant of hw_atomic_and which doesn't return a value; potentially
+ * more efficient on some platforms.
+ */
+extern void hw_atomic_and_noret(
+ volatile uint32_t *dest,
uint32_t mask);
extern uint32_t hw_compare_and_store(
uint32_t oldval,
uint32_t newval,
- uint32_t *dest);
+ volatile uint32_t *dest);
extern void hw_queue_atomic(
unsigned int *anchor,
extern unsigned int usimple_lock_try(
usimple_lock_t);
+extern void usimple_lock_try_lock_loop(
+ usimple_lock_t);
+
__END_DECLS
#define ETAP_NO_TRACE 0
#define simple_lock(l) usimple_lock(l)
#define simple_unlock(l) usimple_unlock(l)
#define simple_lock_try(l) usimple_lock_try(l)
+#define simple_lock_try_lock_loop(l) usimple_lock_try_lock_loop(l)
#define simple_lock_addr(l) (&(l))
-#define thread_sleep_simple_lock(l, e, i) \
- thread_sleep_usimple_lock((l), (e), (i))
#endif /* !defined(simple_lock_init) */
#endif /*!_KERN_SIMPLE_LOCK_H_*/