+ if (__improbable(--val == 0)) {
+ atomic_load_explicit(rc, dealloc_order);
+ }
+
+#if OS_REFCNT_DEBUG
+ if (__improbable(grp && ref_debug_enable)) {
+ if (val == 0) {
+ ref_log_drop(grp, (void *)rc); /* rc is only used as an identifier */
+ }
+ ref_release_group(grp, !val);
+ }
+#endif
+
+ return val;
+}
+
+#undef os_ref_retain_try_mask
+bool
+os_ref_retain_try_mask(os_ref_atomic_t *rc, struct os_refgrp * __debug_only grp, os_ref_count_t bits)
+{
+	/*
+	 * Try to take a reference on a counter whose low `bits` bits are
+	 * reserved (masked out of the count). Fails, returning false, once
+	 * the count field has dropped to zero.
+	 */
+	os_ref_count_t cur = os_ref_get_count_internal(rc);
+	os_ref_count_t next;
+
+	do {
+		/* Count field (above the reserved bits) is zero: object is dead. */
+		if (__improbable((cur >> bits) == 0)) {
+			return false;
+		}
+
+		os_ref_check_overflow(rc, cur);
+
+		next = cur + (1U << bits);
+		/* Weak CAS may fail spuriously; on failure `cur` is refreshed. */
+	} while (!atomic_compare_exchange_weak_explicit(rc, &cur, next,
+	    memory_order_relaxed, memory_order_relaxed));
+
+#if OS_REFCNT_DEBUG
+	if (__improbable(grp && ref_debug_enable)) {
+		ref_retain_debug(rc, grp);
+	}
+#endif
+
+	return true;
+}
+
+#undef os_ref_retain_locked_mask
+void
+os_ref_retain_locked_mask(os_ref_atomic_t *rc, struct os_refgrp * __debug_only grp, os_ref_count_t bits)
+{
+	/*
+	 * Take a reference on a masked counter. Per the `_locked` name the
+	 * caller presumably serializes updates externally, so a plain relaxed
+	 * store is used instead of a CAS loop — confirm against callers.
+	 */
+	os_ref_count_t count = os_ref_get_count_internal(rc);
+
+	os_ref_check_overflow(rc, count);
+	/* The caller must already hold a reference: count field is non-zero. */
+	os_ref_assert_referenced(rc, count >> bits);
+
+	atomic_store_explicit(rc, count + (1U << bits), memory_order_relaxed);
+
+#if OS_REFCNT_DEBUG
+	if (__improbable(grp && ref_debug_enable)) {
+		ref_retain_debug(rc, grp);
+	}
+#endif
+}
+
+#undef os_ref_release_locked_mask
+os_ref_count_t
+os_ref_release_locked_mask(os_ref_atomic_t *rc, struct os_refgrp * __debug_only grp, os_ref_count_t bits)
+{
+ os_ref_count_t val = os_ref_get_count_internal(rc);
+ os_ref_check_underflow(rc, val >> bits);
+ val -= (1U << bits);
+ atomic_store_explicit(rc, val, memory_order_relaxed);
+
+ val >>= bits;