1 #ifndef _OS_REFCNT_INTERNAL_H
2 #define _OS_REFCNT_INTERNAL_H
5 os_ref_atomic_t ref_count
;
7 struct os_refgrp
*ref_group
;
13 const char *const grp_name
;
14 os_ref_atomic_t grp_children
; /* number of refcount objects in group */
15 os_ref_atomic_t grp_count
; /* current reference count of group */
16 _Atomic
uint64_t grp_retain_total
;
17 _Atomic
uint64_t grp_release_total
;
18 struct os_refgrp
*grp_parent
;
19 void *grp_log
; /* refcount logging context */
/* Static initializers for os_ref_atomic_t / struct os_refcnt. */
# define OS_REF_ATOMIC_INITIALIZER ATOMIC_VAR_INIT(0)
/*
 * NOTE(review): the conditionals below were dropped in extraction; without
 * them OS_REF_INITIALIZER and os_ref_if_debug are each defined twice with
 * conflicting bodies (a preprocessor error). The gaps in the original line
 * numbering match one #if/#else/#endif per pair.
 */
#if OS_REFCNT_DEBUG
# define OS_REF_INITIALIZER { .ref_count = OS_REF_ATOMIC_INITIALIZER, .ref_group = NULL }
#else
# define OS_REF_INITIALIZER { .ref_count = OS_REF_ATOMIC_INITIALIZER }
#endif

/* Select the first argument in debug builds, the second otherwise. */
#if OS_REFCNT_DEBUG
# define os_ref_if_debug(x, y) x
#else
# define os_ref_if_debug(x, y) y
#endif
38 void os_ref_init_count_external(os_ref_atomic_t
*, struct os_refgrp
*, os_ref_count_t
);
39 void os_ref_retain_external(os_ref_atomic_t
*, struct os_refgrp
*);
40 void os_ref_retain_locked_external(os_ref_atomic_t
*, struct os_refgrp
*);
41 os_ref_count_t
os_ref_release_external(os_ref_atomic_t
*, struct os_refgrp
*,
42 memory_order release_order
, memory_order dealloc_order
);
43 os_ref_count_t
os_ref_release_relaxed_external(os_ref_atomic_t
*, struct os_refgrp
*);
44 os_ref_count_t
os_ref_release_barrier_external(os_ref_atomic_t
*, struct os_refgrp
*);
45 os_ref_count_t
os_ref_release_locked_external(os_ref_atomic_t
*, struct os_refgrp
*);
46 bool os_ref_retain_try_external(os_ref_atomic_t
*, struct os_refgrp
*);
/*
 * "Internal" variants used by the inline wrappers below. Inside the kernel
 * proper they are distinct symbols; elsewhere they alias the external ones.
 * NOTE(review): the #else/#endif were dropped in extraction (original line
 * numbering gaps at 58/68); without them this #if never closes and the
 * defines collide with the prototypes.
 */
#if XNU_KERNEL_PRIVATE
void os_ref_init_count_internal(os_ref_atomic_t *, struct os_refgrp *, os_ref_count_t);
void os_ref_retain_internal(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_relaxed_internal(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_barrier_internal(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_internal(os_ref_atomic_t *, struct os_refgrp *,
    memory_order release_order, memory_order dealloc_order);
bool os_ref_retain_try_internal(os_ref_atomic_t *, struct os_refgrp *);
void os_ref_retain_locked_internal(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_locked_internal(os_ref_atomic_t *, struct os_refgrp *);
#else

/* For now, the internal and external variants are identical */
#define os_ref_init_count_internal      os_ref_init_count_external
#define os_ref_retain_internal          os_ref_retain_external
#define os_ref_retain_locked_internal   os_ref_retain_locked_external
#define os_ref_release_internal         os_ref_release_external
#define os_ref_release_barrier_internal os_ref_release_barrier_external
#define os_ref_release_relaxed_internal os_ref_release_relaxed_external
#define os_ref_release_locked_internal  os_ref_release_locked_external
#define os_ref_retain_try_internal      os_ref_retain_try_external

#endif /* XNU_KERNEL_PRIVATE */
71 os_ref_init_count(struct os_refcnt
*rc
, struct os_refgrp
* __unused grp
, os_ref_count_t count
)
76 os_ref_init_count_internal(&rc
->ref_count
, os_ref_if_debug(rc
->ref_group
, NULL
), count
);
80 os_ref_retain(struct os_refcnt
*rc
)
82 os_ref_retain_internal(&rc
->ref_count
, os_ref_if_debug(rc
->ref_group
, NULL
));
85 static inline os_ref_count_t
86 os_ref_release_locked(struct os_refcnt
*rc
)
88 return os_ref_release_locked_internal(&rc
->ref_count
, os_ref_if_debug(rc
->ref_group
, NULL
));
92 os_ref_retain_locked(struct os_refcnt
*rc
)
94 os_ref_retain_internal(&rc
->ref_count
, os_ref_if_debug(rc
->ref_group
, NULL
));
98 os_ref_retain_try(struct os_refcnt
*rc
)
100 return os_ref_retain_try_internal(&rc
->ref_count
, os_ref_if_debug(rc
->ref_group
, NULL
));
103 __deprecated_msg("inefficient codegen, prefer os_ref_release / os_ref_release_relaxed")
104 static inline os_ref_count_t OS_WARN_RESULT
105 os_ref_release_explicit(struct os_refcnt
*rc
, memory_order release_order
, memory_order dealloc_order
)
107 return os_ref_release_internal(&rc
->ref_count
, os_ref_if_debug(rc
->ref_group
, NULL
),
108 release_order
, dealloc_order
);
/*
 * Group declaration helpers. In debug builds a group is a real object
 * placed in the __DATA,__refgrps section; otherwise the declaration
 * collapses to an unused extern.
 * NOTE(review): reconstructed from a garbled extraction — the #if opener,
 * the .grp_log initializer + closing brace, and the statement-expression
 * terminator were missing. Also fixed the non-debug os_refgrp_decl, which
 * was variadic yet expanded a literal `var`, so every use would have
 * declared a global actually named "var"; os_refgrp_decl_extern likewise
 * passed the wrong arity.
 */
#if OS_REFCNT_DEBUG
# define os_refgrp_decl(qual, var, name, parent) \
	qual struct os_refgrp __attribute__((section("__DATA,__refgrps"))) var = { \
	    .grp_name =          (name), \
	    .grp_children =      ATOMIC_VAR_INIT(0u), \
	    .grp_count =         ATOMIC_VAR_INIT(0u), \
	    .grp_retain_total =  ATOMIC_VAR_INIT(0u), \
	    .grp_release_total = ATOMIC_VAR_INIT(0u), \
	    .grp_parent =        (parent), \
	    .grp_log =           NULL, \
	}

# define os_refgrp_decl_extern(var) \
	extern struct os_refgrp var

/* Create a default group based on the init() callsite if no explicit group
 * is provided. */
# define os_ref_init_count(rc, grp, count) ({ \
	os_refgrp_decl(static, __grp, __func__, NULL); \
	(os_ref_init_count)((rc), (grp) ? (grp) : &__grp, (count)); \
})

#else /* OS_REFCNT_DEBUG */

# define os_refgrp_decl(qual, var, name, parent) extern struct os_refgrp var __attribute__((unused))
# define os_refgrp_decl_extern(var) os_refgrp_decl(, var, , )
# define os_ref_init_count(rc, grp, count) (os_ref_init_count)((rc), NULL, (count))

#endif /* OS_REFCNT_DEBUG */
140 #if XNU_KERNEL_PRIVATE
141 void os_ref_panic_live(void *rc
) __abortlike
;
145 os_ref_panic_live(void *rc
)
147 panic("os_refcnt: unexpected release of final reference (rc=%p)\n", rc
);
148 __builtin_unreachable();
152 static inline os_ref_count_t OS_WARN_RESULT
153 os_ref_release(struct os_refcnt
*rc
)
155 return os_ref_release_barrier_internal(&rc
->ref_count
,
156 os_ref_if_debug(rc
->ref_group
, NULL
));
159 static inline os_ref_count_t OS_WARN_RESULT
160 os_ref_release_relaxed(struct os_refcnt
*rc
)
162 return os_ref_release_relaxed_internal(&rc
->ref_count
,
163 os_ref_if_debug(rc
->ref_group
, NULL
));
/*
 * Release a reference the caller knows is not the last one; panics if it
 * unexpectedly turns out to be the final reference.
 */
static inline void
os_ref_release_live(struct os_refcnt *rc)
{
	if (__improbable(os_ref_release(rc) == 0)) {
		os_ref_panic_live(rc);
	}
}
174 static inline os_ref_count_t
175 os_ref_get_count_internal(os_ref_atomic_t
*rc
)
177 return atomic_load_explicit(rc
, memory_order_relaxed
);
180 static inline os_ref_count_t
181 os_ref_get_count(struct os_refcnt
*rc
)
183 return os_ref_get_count_internal(&rc
->ref_count
);
193 os_ref_init_count_raw(os_ref_atomic_t
*rc
, struct os_refgrp
*grp
, os_ref_count_t count
)
195 os_ref_init_count_internal(rc
, grp
, count
);
199 os_ref_retain_raw(os_ref_atomic_t
*rc
, struct os_refgrp
*grp
)
201 os_ref_retain_internal(rc
, grp
);
204 static inline os_ref_count_t
205 os_ref_release_raw(os_ref_atomic_t
*rc
, struct os_refgrp
*grp
)
207 return os_ref_release_barrier_internal(rc
, grp
);
210 static inline os_ref_count_t
211 os_ref_release_relaxed_raw(os_ref_atomic_t
*rc
, struct os_refgrp
*grp
)
213 return os_ref_release_relaxed_internal(rc
, grp
);
217 os_ref_release_live_raw(os_ref_atomic_t
*rc
, struct os_refgrp
*grp
)
219 if (__improbable(os_ref_release_barrier_internal(rc
, grp
) == 0)) {
220 os_ref_panic_live(rc
);
225 os_ref_retain_try_raw(os_ref_atomic_t
*rc
, struct os_refgrp
*grp
)
227 return os_ref_retain_try_internal(rc
, grp
);
231 os_ref_retain_locked_raw(os_ref_atomic_t
*rc
, struct os_refgrp
*grp
)
233 os_ref_retain_locked_internal(rc
, grp
);
236 static inline os_ref_count_t
237 os_ref_release_locked_raw(os_ref_atomic_t
*rc
, struct os_refgrp
*grp
)
239 return os_ref_release_locked_internal(rc
, grp
);
242 static inline os_ref_count_t
243 os_ref_get_count_raw(os_ref_atomic_t
*rc
)
245 return os_ref_get_count_internal(rc
);
/*
 * NOTE(review): the guard around this macro set was dropped in extraction
 * (original line-number gap before/after); it is restored so the NULL-group
 * shortcuts only apply to non-debug builds. The parenthesized names bypass
 * the macros and call the inline functions above.
 */
#if !OS_REFCNT_DEBUG
/* remove the group argument for non-debug */
#define os_ref_init_count_raw(rc, grp, count) (os_ref_init_count_raw)((rc), NULL, (count))
#define os_ref_retain_raw(rc, grp)            (os_ref_retain_raw)((rc), NULL)
#define os_ref_release_raw(rc, grp)           (os_ref_release_raw)((rc), NULL)
#define os_ref_release_relaxed_raw(rc, grp)   (os_ref_release_relaxed_raw)((rc), NULL)
#define os_ref_release_live_raw(rc, grp)      (os_ref_release_live_raw)((rc), NULL)
#define os_ref_retain_try_raw(rc, grp)        (os_ref_retain_try_raw)((rc), NULL)
#define os_ref_retain_locked_raw(rc, grp)     (os_ref_retain_locked_raw)((rc), NULL)
#define os_ref_release_locked_raw(rc, grp)    (os_ref_release_locked_raw)((rc), NULL)
#endif /* !OS_REFCNT_DEBUG */
/*
 * Masked refcount operations (kernel-private): the low `b` bits of the
 * atomic word carry side-band state, the remaining bits the count.
 * NOTE(review): reconstructed from a garbled extraction — function
 * specifiers/braces and the closing #endifs were missing. Also fixes a
 * genuine typo: the non-debug os_ref_release_relaxed_mask macro expanded
 * to (os_ref_relaxed_mask), a name that does not exist; it must bypass to
 * (os_ref_release_relaxed_mask).
 */
#if XNU_KERNEL_PRIVATE
os_ref_count_t os_ref_release_mask_internal(os_ref_atomic_t *rc, struct os_refgrp *grp,
    os_ref_count_t b, memory_order release_order, memory_order dealloc_order);

/* Masked release with release/acquire ordering; returns the new count. */
static inline os_ref_count_t
os_ref_release_mask(os_ref_atomic_t *rc, struct os_refgrp *grp, os_ref_count_t b)
{
	return os_ref_release_mask_internal(rc, grp, b,
	           memory_order_release, memory_order_acquire);
}

/* Masked release with relaxed ordering; returns the new count. */
static inline os_ref_count_t
os_ref_release_relaxed_mask(os_ref_atomic_t *rc, struct os_refgrp *grp, os_ref_count_t b)
{
	return os_ref_release_mask_internal(rc, grp, b,
	           memory_order_relaxed, memory_order_relaxed);
}

/* Masked release of a reference known not to be final; panics otherwise. */
static inline void
os_ref_release_live_mask(os_ref_atomic_t *rc, struct os_refgrp *grp, os_ref_count_t b)
{
	if (__improbable(os_ref_release_mask_internal(rc, grp, b,
	    memory_order_release, memory_order_relaxed) == 0)) {
		os_ref_panic_live(rc);
	}
}

#if !OS_REFCNT_DEBUG
/* remove the group argument for non-debug */
#define os_ref_init_count_mask(rc, grp, init_c, init_b, b) (os_ref_init_count_mask)(rc, NULL, init_c, init_b, b)
#define os_ref_retain_mask(rc, grp, b)          (os_ref_retain_mask)((rc), NULL, (b))
#define os_ref_release_mask(rc, grp, b)         (os_ref_release_mask)((rc), NULL, (b))
#define os_ref_release_relaxed_mask(rc, grp, b) (os_ref_release_relaxed_mask)((rc), NULL, (b))
#define os_ref_release_live_mask(rc, grp, b)    (os_ref_release_live_mask)((rc), NULL, (b))
#define os_ref_retain_try_mask(rc, grp, b)      (os_ref_retain_try_mask)((rc), NULL, (b))
#define os_ref_release_locked_mask(rc, grp, b)  (os_ref_release_locked_mask)((rc), NULL, (b))
#define os_ref_retain_locked_mask(rc, grp, b)   (os_ref_retain_locked_mask)((rc), NULL, (b))
#endif /* !OS_REFCNT_DEBUG */

#endif /* XNU_KERNEL_PRIVATE */
301 #endif /* _OS_REFCNT_INTERNAL_H */