+/*
+ * Compare and exchange:
+ * - returns failure (0) if the location did not contain the old value,
+ * - returns success (1) if the location was set to the new value.
+ */
+static inline uint32_t
+atomic_cmpxchg(uint32_t *p, uint32_t old, uint32_t new)
+{
+ uint32_t res = old;
+
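+	/*
+	 * cmpxchgl compares %eax (the expected old value) with *p and, if
+	 * they match, stores the new value and sets ZF; setz/movzbl then
+	 * turn ZF into the 0/1 result returned in %eax.
+	 */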
+ asm volatile(
+ "lock; cmpxchgl %1,%2; \n\t"
+ " setz %%al; \n\t"
+ " movzbl %%al,%0"
+ : "+a" (res) /* %0: old value to compare, returns success */
+ : "r" (new), /* %1: new value to set */
+ "m" (*(p)) /* %2: memory address */
+ : "memory");
+ return (res);
+}
+
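+/*
+ * Illustrative sketch only (hypothetical caller, not part of this change):
+ * a typical user retries the CAS until it succeeds, e.g. to add a delta to
+ * a shared 32-bit counter:
+ *
+ *	uint32_t old, new;
+ *	do {
+ *		old = *counter;
+ *		new = old + delta;
+ *	} while (!atomic_cmpxchg(counter, old, new));
+ */
+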
+static inline uint64_t
+atomic_load64(uint64_t *quadp)
+{
+ uint64_t ret;
+
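+	/*
+	 * cmpxchg8b compares %edx:%eax with the 8-byte memory operand; on a
+	 * match it writes %ecx:%ebx, otherwise it loads the current value
+	 * into %edx:%eax.  With all four registers zeroed, the memory either
+	 * keeps its zero value or is copied into %edx:%eax, so either way
+	 * the full 64-bit contents are read in one locked operation.
+	 */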
+ asm volatile(
+ " lock; cmpxchg8b %1"
+ : "=A" (ret)
+ : "m" (*quadp), "a" (0), "d" (0), "b" (0), "c" (0));
+ return (ret);
+}
+
+static inline uint64_t
+atomic_loadstore64(uint64_t *quadp, uint64_t new)
+{
+ uint64_t ret;
+
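+	/*
+	 * 64-bit atomic exchange: start from a (possibly stale) snapshot of
+	 * the target in %edx:%eax and loop on cmpxchg8b.  A failed compare
+	 * refreshes %edx:%eax with the current contents, so the retry uses
+	 * an up-to-date comparand; on success %ecx:%ebx (the new value) has
+	 * been stored and the previous value is returned.
+	 */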
+ ret = *quadp;
+ asm volatile(
+ "1: \n\t"
+ " lock; cmpxchg8b %1 \n\t"
+ " jnz 1b"
+ : "+A" (ret)
+ : "m" (*quadp),
+ "b" ((uint32_t)new), "c" ((uint32_t)(new >> 32)));
+ return (ret);
+}
+
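+/*
+ * Illustrative sketch only (hypothetical names, not part of this change):
+ * on 32-bit x86 a plain 64-bit access is typically performed as two 32-bit
+ * moves, so a shared uint64_t would be written with atomic_loadstore64()
+ * and read with atomic_load64():
+ *
+ *	(void) atomic_loadstore64(&shared_stamp, value);
+ *	value = atomic_load64(&shared_stamp);
+ */
+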
+static inline void
+atomic_incl(long *p, long delta)