xref: /xnu-8792.61.2/libkern/os/refcnt_internal.h (revision 42e220869062b56f8d7d0726fd4c88954f87902c)
1 #ifndef _OS_REFCNT_INTERNAL_H
2 #define _OS_REFCNT_INTERNAL_H
3 
/*
 * A reference count object: the atomic counter itself plus, in debug
 * builds only, the refcount group the object is accounted against.
 * Layout is ABI: do not reorder or resize fields.
 */
struct os_refcnt {
	os_ref_atomic_t ref_count;     /* current reference count */
#if OS_REFCNT_DEBUG
	struct os_refgrp *ref_group;   /* debug-only: group this object belongs to */
#endif
};
10 
#if OS_REFCNT_DEBUG
/*
 * As this structure gets baked-in at compile-time, changes can break the ABI.
 * To allow a little more flexibility for the future a new 'flags' member is
 * added but left unused for the moment. Newly compiled consumers will get the
 * new structure and once every upstream project has been recompiled the new field
 * can be used.
 */
struct os_refgrp {
	const char *grp_name;          /* display name of the group */
	os_ref_atomic_t grp_children;  /* number of refcount objects in group */
	os_ref_atomic_t grp_count;     /* current reference count of group */
	_Atomic uint64_t grp_retain_total;   /* running retain tally (maintained by the refcnt implementation) */
	_Atomic uint64_t grp_release_total;  /* running release tally (maintained by the refcnt implementation) */
	struct os_refgrp *grp_parent;  /* parent group, or NULL for a root group */
	void *grp_log;                 /* refcount logging context */
	uint64_t grp_flags;            /* Unused for now. */
};

#endif
31 
/*
 * Static initializers for an os_ref_atomic_t / struct os_refcnt.
 * The count starts at 0; in debug builds the group starts unset.
 */
# define OS_REF_ATOMIC_INITIALIZER ATOMIC_VAR_INIT(0)
#if OS_REFCNT_DEBUG
# define OS_REF_INITIALIZER { .ref_count = OS_REF_ATOMIC_INITIALIZER, .ref_group = NULL }
#else
# define OS_REF_INITIALIZER { .ref_count = OS_REF_ATOMIC_INITIALIZER }
#endif
38 
39 __BEGIN_DECLS
40 
/*
 * Token-selection helper: expands to `x` when refcount debugging is
 * compiled in, to `y` otherwise. Used below to pass the debug-only
 * ref_group field (or NULL) to the *_internal functions; the unselected
 * argument is never evaluated.
 */
#if OS_REFCNT_DEBUG
# define os_ref_if_debug(x, y) x
#else
# define os_ref_if_debug(x, y) y
#endif
46 
/*
 * "External" entry points operating on a raw os_ref_atomic_t. These are
 * the exported symbols; see the XNU_KERNEL_PRIVATE aliasing below for how
 * they relate to the *_internal variants.
 */
void os_ref_init_count_external(os_ref_atomic_t *, struct os_refgrp *, os_ref_count_t);
void os_ref_retain_external(os_ref_atomic_t *, struct os_refgrp *);
void os_ref_retain_locked_external(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_external(os_ref_atomic_t *, struct os_refgrp *,
    memory_order release_order, memory_order dealloc_order);
os_ref_count_t os_ref_release_relaxed_external(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_barrier_external(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_locked_external(os_ref_atomic_t *, struct os_refgrp *);
bool os_ref_retain_try_external(os_ref_atomic_t *, struct os_refgrp *);
56 
#if XNU_KERNEL_PRIVATE
/* Kernel-private implementations (may diverge from the external ones). */
void os_ref_init_count_internal(os_ref_atomic_t *, struct os_refgrp *, os_ref_count_t);
void os_ref_retain_internal(os_ref_atomic_t *, struct os_refgrp *);
void os_ref_retain_floor_internal(os_ref_atomic_t *, os_ref_count_t, struct os_refgrp *);
os_ref_count_t os_ref_release_relaxed_internal(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_barrier_internal(os_ref_atomic_t *, struct os_refgrp *);
os_ref_count_t os_ref_release_internal(os_ref_atomic_t *, struct os_refgrp *,
    memory_order release_order, memory_order dealloc_order);
bool os_ref_retain_try_internal(os_ref_atomic_t *, struct os_refgrp *);
bool os_ref_retain_floor_try_internal(os_ref_atomic_t *, os_ref_count_t, struct os_refgrp *);
void os_ref_retain_locked_internal(os_ref_atomic_t *, struct os_refgrp *);
void os_ref_retain_floor_locked_internal(os_ref_atomic_t *, os_ref_count_t, struct os_refgrp *);
os_ref_count_t os_ref_release_locked_internal(os_ref_atomic_t *, struct os_refgrp *);
#else
/* For now, the internal and external variants are identical */
#define os_ref_init_count_internal      os_ref_init_count_external
#define os_ref_retain_internal          os_ref_retain_external
#define os_ref_retain_locked_internal   os_ref_retain_locked_external
#define os_ref_release_internal         os_ref_release_external
#define os_ref_release_barrier_internal os_ref_release_barrier_external
#define os_ref_release_relaxed_internal os_ref_release_relaxed_external
#define os_ref_release_locked_internal  os_ref_release_locked_external
#define os_ref_retain_try_internal      os_ref_retain_try_external
#endif
81 
/*
 * Initialize `rc` to `count` references. In debug builds the object is
 * associated with `grp` (which may be NULL); otherwise the group argument
 * is ignored. Note: shadowed by the os_ref_init_count() macro further
 * down, which supplies a callsite-default group in debug builds.
 */
static inline void
os_ref_init_count(struct os_refcnt *rc, struct os_refgrp * __unused grp, os_ref_count_t count)
{
#if OS_REFCNT_DEBUG
	rc->ref_group = grp;
#endif
	os_ref_init_count_internal(&rc->ref_count, os_ref_if_debug(rc->ref_group, NULL), count);
}
90 
91 static inline void
os_ref_retain(struct os_refcnt * rc)92 os_ref_retain(struct os_refcnt *rc)
93 {
94 	os_ref_retain_internal(&rc->ref_count, os_ref_if_debug(rc->ref_group, NULL));
95 }
96 
97 static inline os_ref_count_t
os_ref_release_locked(struct os_refcnt * rc)98 os_ref_release_locked(struct os_refcnt *rc)
99 {
100 	return os_ref_release_locked_internal(&rc->ref_count, os_ref_if_debug(rc->ref_group, NULL));
101 }
102 
103 static inline void
os_ref_retain_locked(struct os_refcnt * rc)104 os_ref_retain_locked(struct os_refcnt *rc)
105 {
106 	os_ref_retain_internal(&rc->ref_count, os_ref_if_debug(rc->ref_group, NULL));
107 }
108 
109 static inline bool
os_ref_retain_try(struct os_refcnt * rc)110 os_ref_retain_try(struct os_refcnt *rc)
111 {
112 	return os_ref_retain_try_internal(&rc->ref_count, os_ref_if_debug(rc->ref_group, NULL));
113 }
114 
115 __deprecated_msg("inefficient codegen, prefer os_ref_release / os_ref_release_relaxed")
116 static inline os_ref_count_t OS_WARN_RESULT
os_ref_release_explicit(struct os_refcnt * rc,memory_order release_order,memory_order dealloc_order)117 os_ref_release_explicit(struct os_refcnt *rc, memory_order release_order, memory_order dealloc_order)
118 {
119 	return os_ref_release_internal(&rc->ref_count, os_ref_if_debug(rc->ref_group, NULL),
120 	           release_order, dealloc_order);
121 }
122 
#if OS_REFCNT_DEBUG
/*
 * Compile-time initializer for a struct os_refgrp. grp_flags is
 * deliberately omitted (zero-initialized); see the ABI note above the
 * structure definition.
 */
# define os_refgrp_initializer(name, parent) \
	 { \
	        .grp_name =          (name), \
	        .grp_children =      ATOMIC_VAR_INIT(0u), \
	        .grp_count =         ATOMIC_VAR_INIT(0u), \
	        .grp_retain_total =  ATOMIC_VAR_INIT(0u), \
	        .grp_release_total = ATOMIC_VAR_INIT(0u), \
	        .grp_parent =        (parent), \
	        .grp_log =           NULL, \
	}
/* Define a refcount group, placed in the dedicated __DATA,__refgrps
 * section (presumably so tooling can enumerate all groups — confirm). */
# define os_refgrp_decl(qual, var, name, parent) \
	qual struct os_refgrp __attribute__((section("__DATA,__refgrps"))) var =  \
	    os_refgrp_initializer(name, parent)
# define os_refgrp_decl_extern(var) \
	extern struct os_refgrp var

/* Create a default group based on the init() callsite if no explicit group
 * is provided. */
# define os_ref_init_count(rc, grp, count) ({ \
	        os_refgrp_decl(static, __grp, __func__, NULL); \
	        (os_ref_init_count)((rc), (grp) ? (grp) : &__grp, (count)); \
	})

#else /* OS_REFCNT_DEBUG */

/* Non-debug builds: group declarations collapse to unused externs and the
 * group argument to os_ref_init_count() is dropped. */
# define os_refgrp_decl(qual, var, name, parent) extern struct os_refgrp var __attribute__((unused))
# define os_refgrp_decl_extern(var) os_refgrp_decl(, var, ,)
# define os_ref_init_count(rc, grp, count) (os_ref_init_count)((rc), NULL, (count))

#endif /* OS_REFCNT_DEBUG */
154 
#if XNU_KERNEL_PRIVATE
/* Abort path used by the release_live variants when the supposedly
 * non-final reference turned out to be the last one. Never returns. */
void os_ref_panic_live(void *rc) __abortlike;
#else
__abortlike
static inline void
os_ref_panic_live(void *rc)
{
	panic("os_refcnt: unexpected release of final reference (rc=%p)\n", rc);
	__builtin_unreachable();
}
#endif
166 
167 static inline os_ref_count_t OS_WARN_RESULT
os_ref_release(struct os_refcnt * rc)168 os_ref_release(struct os_refcnt *rc)
169 {
170 	return os_ref_release_barrier_internal(&rc->ref_count,
171 	           os_ref_if_debug(rc->ref_group, NULL));
172 }
173 
174 static inline os_ref_count_t OS_WARN_RESULT
os_ref_release_relaxed(struct os_refcnt * rc)175 os_ref_release_relaxed(struct os_refcnt *rc)
176 {
177 	return os_ref_release_relaxed_internal(&rc->ref_count,
178 	           os_ref_if_debug(rc->ref_group, NULL));
179 }
180 
181 static inline void
os_ref_release_live(struct os_refcnt * rc)182 os_ref_release_live(struct os_refcnt *rc)
183 {
184 	if (__improbable(os_ref_release(rc) == 0)) {
185 		os_ref_panic_live(rc);
186 	}
187 }
188 
/* Relaxed (unordered) load of a raw reference count. */
static inline os_ref_count_t
os_ref_get_count_internal(os_ref_atomic_t *rc)
{
	return atomic_load_explicit(rc, memory_order_relaxed);
}

/*
 * Snapshot the current reference count. Relaxed load: the value may be
 * stale by the time the caller looks at it.
 */
static inline os_ref_count_t
os_ref_get_count(struct os_refcnt *rc)
{
	return os_ref_get_count_internal(&rc->ref_count);
}
200 
201 #if XNU_KERNEL_PRIVATE
202 #pragma GCC visibility push(hidden)
203 
204 /*
205  * Raw API
206  */
207 
/* os_ref_init_count for a raw os_ref_atomic_t with an explicit group. */
static inline void
os_ref_init_count_raw(os_ref_atomic_t *rc, struct os_refgrp *grp, os_ref_count_t count)
{
	os_ref_init_count_internal(rc, grp, count);
}

/* Retain with a required floor `f` on the count (exact semantics live in
 * the _floor_ internal implementation). */
static inline void
os_ref_retain_floor(struct os_refcnt *rc, os_ref_count_t f)
{
	os_ref_retain_floor_internal(&rc->ref_count, f, os_ref_if_debug(rc->ref_group, NULL));
}

/* os_ref_retain for a raw counter. */
static inline void
os_ref_retain_raw(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	os_ref_retain_internal(rc, grp);
}

/* Floor-checked retain for a raw counter. */
static inline void
os_ref_retain_floor_raw(os_ref_atomic_t *rc, os_ref_count_t f, struct os_refgrp *grp)
{
	os_ref_retain_floor_internal(rc, f, grp);
}
231 
/* os_ref_release (barrier ordering) for a raw counter; returns the new count. */
static inline os_ref_count_t
os_ref_release_raw(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	return os_ref_release_barrier_internal(rc, grp);
}

/* os_ref_release_relaxed for a raw counter; returns the new count. */
static inline os_ref_count_t
os_ref_release_raw_relaxed(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	return os_ref_release_relaxed_internal(rc, grp);
}

/* Release a reference that must not be the last one; panics if the
 * count reached zero. */
static inline void
os_ref_release_live_raw(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	if (__improbable(os_ref_release_barrier_internal(rc, grp) == 0)) {
		os_ref_panic_live(rc);
	}
}
251 
/* os_ref_retain_try for a raw counter. */
static inline bool
os_ref_retain_try_raw(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	return os_ref_retain_try_internal(rc, grp);
}

/* Floor-checked try-retain for a raw counter. */
static inline bool
os_ref_retain_floor_try_raw(os_ref_atomic_t *rc, os_ref_count_t f,
    struct os_refgrp *grp)
{
	return os_ref_retain_floor_try_internal(rc, f, grp);
}

/* Retain under the caller's lock, raw counter. */
static inline void
os_ref_retain_locked_raw(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	os_ref_retain_locked_internal(rc, grp);
}

/* Floor-checked retain under the caller's lock, raw counter. */
static inline void
os_ref_retain_floor_locked_raw(os_ref_atomic_t *rc, os_ref_count_t f,
    struct os_refgrp *grp)
{
	os_ref_retain_floor_locked_internal(rc, f, grp);
}

/* Release under the caller's lock, raw counter; returns the new count. */
static inline os_ref_count_t
os_ref_release_locked_raw(os_ref_atomic_t *rc, struct os_refgrp *grp)
{
	return os_ref_release_locked_internal(rc, grp);
}

/* Relaxed snapshot of a raw counter's current value. */
static inline os_ref_count_t
os_ref_get_count_raw(os_ref_atomic_t *rc)
{
	return os_ref_get_count_internal(rc);
}
289 
#if !OS_REFCNT_DEBUG
/* remove the group argument for non-debug */
/* Each macro expands to a parenthesized call of the same-named inline
 * function (parentheses suppress macro re-expansion) with NULL for the
 * group, so the caller's grp expression is never evaluated. */
#define os_ref_init_count_raw(rc, grp, count) (os_ref_init_count_raw)((rc), NULL, (count))
#define os_ref_retain_raw(rc, grp) (os_ref_retain_raw)((rc), NULL)
#define os_ref_retain_floor_raw(rc, f, grp) (os_ref_retain_floor_raw)((rc), f, NULL)
#define os_ref_release_raw(rc, grp) (os_ref_release_raw)((rc), NULL)
#define os_ref_release_raw_relaxed(rc, grp) (os_ref_release_raw_relaxed)((rc), NULL)
#define os_ref_release_live_raw(rc, grp) (os_ref_release_live_raw)((rc), NULL)
#define os_ref_retain_try_raw(rc, grp) (os_ref_retain_try_raw)((rc), NULL)
#define os_ref_retain_floor_try_raw(rc, f, grp) (os_ref_retain_floor_try_raw)((rc), f, NULL)
#define os_ref_retain_locked_raw(rc, grp) (os_ref_retain_locked_raw)((rc), NULL)
#define os_ref_retain_floor_locked_raw(rc, f, grp) (os_ref_retain_floor_locked_raw)((rc), f, NULL)
#define os_ref_release_locked_raw(rc, grp) (os_ref_release_locked_raw)((rc), NULL)
#endif
304 
/* Tear down / set up the refcount log attached to a group. */
extern void
os_ref_log_fini(struct os_refgrp *grp);

extern void
os_ref_log_init(struct os_refgrp *grp);

/*
 * Implementation hooks for the "mask" API below: the atomic word packs a
 * reference count in the high bits above `b` low flag bits, so a single
 * retain/release adds or subtracts `n` == (1 << b) (see the wrappers,
 * which all pass 1u << b).
 */
extern void
os_ref_retain_mask_internal(os_ref_atomic_t *rc, uint32_t n, struct os_refgrp *grp);
extern void
os_ref_retain_acquire_mask_internal(os_ref_atomic_t *rc, uint32_t n, struct os_refgrp *grp);
extern uint32_t
os_ref_retain_try_mask_internal(os_ref_atomic_t *, uint32_t n,
    uint32_t reject_mask, struct os_refgrp *grp) OS_WARN_RESULT;
extern bool
os_ref_retain_try_acquire_mask_internal(os_ref_atomic_t *, uint32_t n,
    uint32_t reject_mask, struct os_refgrp *grp) OS_WARN_RESULT;

extern uint32_t
os_ref_release_barrier_mask_internal(os_ref_atomic_t *rc, uint32_t n, struct os_refgrp *grp);
extern uint32_t
os_ref_release_relaxed_mask_internal(os_ref_atomic_t *rc, uint32_t n, struct os_refgrp *grp);
326 
/* Raw combined value: count in the high bits, flag bits in the low bits. */
static inline uint32_t
os_ref_get_raw_mask(os_ref_atomic_t *rc)
{
	return os_ref_get_count_internal(rc);
}

/* Extract the low `b` flag bits from the combined value. */
static inline uint32_t
os_ref_get_bits_mask(os_ref_atomic_t *rc, uint32_t b)
{
	return os_ref_get_raw_mask(rc) & ((1u << b) - 1);
}

/* Extract the reference count stored above the low `b` flag bits. */
static inline os_ref_count_t
os_ref_get_count_mask(os_ref_atomic_t *rc, uint32_t b)
{
	return os_ref_get_raw_mask(rc) >> b;
}
344 
/* Take one reference on a combined count+bits value (adds 1 << b). */
static inline void
os_ref_retain_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp)
{
	os_ref_retain_mask_internal(rc, 1u << b, grp);
}

/* As os_ref_retain_mask, via the acquire-ordered internal variant. */
static inline void
os_ref_retain_acquire_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp)
{
	os_ref_retain_acquire_mask_internal(rc, 1u << b, grp);
}

/* Try-retain on a combined value; `reject_mask` selects bits that cause
 * the retain to fail (exact semantics in the internal implementation).
 * Returns the internal function's raw result. */
static inline uint32_t
os_ref_retain_try_mask(os_ref_atomic_t *rc, uint32_t b,
    uint32_t reject_mask, struct os_refgrp *grp)
{
	return os_ref_retain_try_mask_internal(rc, 1u << b, reject_mask, grp);
}

/* Acquire-ordered try-retain on a combined value; returns success. */
static inline bool
os_ref_retain_try_acquire_mask(os_ref_atomic_t *rc, uint32_t b,
    uint32_t reject_mask, struct os_refgrp *grp)
{
	return os_ref_retain_try_acquire_mask_internal(rc, 1u << b, reject_mask, grp);
}
370 
/* Drop one reference (subtract 1 << b) with barrier ordering; returns
 * the new raw value (count and flag bits combined). */
static inline uint32_t
os_ref_release_raw_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp)
{
	return os_ref_release_barrier_mask_internal(rc, 1u << b, grp);
}

/* Relaxed-ordering variant of os_ref_release_raw_mask. */
static inline uint32_t
os_ref_release_raw_relaxed_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp)
{
	return os_ref_release_relaxed_mask_internal(rc, 1u << b, grp);
}

/* Drop one reference with barrier ordering; returns the remaining count
 * (raw value shifted past the low `b` flag bits). */
static inline os_ref_count_t
os_ref_release_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp)
{
	return os_ref_release_barrier_mask_internal(rc, 1u << b, grp) >> b;
}

/* Relaxed-ordering variant of os_ref_release_mask. */
static inline os_ref_count_t
os_ref_release_relaxed_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp)
{
	return os_ref_release_relaxed_mask_internal(rc, 1u << b, grp) >> b;
}

/* Drop a reference that must not be the last one; a raw result below
 * (1 << b) means the count hit zero, which panics. Returns the new raw
 * value. */
static inline uint32_t
os_ref_release_live_raw_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp)
{
	uint32_t val = os_ref_release_barrier_mask_internal(rc, 1u << b, grp);
	if (__improbable(val < 1u << b)) {
		os_ref_panic_live(rc);
	}
	return val;
}

/* As os_ref_release_live_raw_mask, discarding the returned value. */
static inline void
os_ref_release_live_mask(os_ref_atomic_t *rc, uint32_t b, struct os_refgrp *grp)
{
	os_ref_release_live_raw_mask(rc, b, grp);
}
410 
#if !OS_REFCNT_DEBUG
/* remove the group argument for non-debug: each macro expands to a
 * parenthesized call of the same-named inline function with NULL for
 * the group. */
#define os_ref_init_count_mask(rc, b, grp, init_c, init_b) (os_ref_init_count_mask)(rc, b, NULL, init_c, init_b)
#define os_ref_retain_mask(rc, b, grp) (os_ref_retain_mask)((rc), (b), NULL)
#define os_ref_retain_acquire_mask(rc, b, grp) (os_ref_retain_acquire_mask)((rc), (b), NULL)
#define os_ref_retain_try_mask(rc, b, m, grp) (os_ref_retain_try_mask)((rc), (b), (m), NULL)
/* Fixed: forward the reject-mask argument `m`, matching the inline
 * function's four-parameter signature (the macro previously took only
 * (rc, b, grp) and expanded to a call with too few arguments). */
#define os_ref_retain_try_acquire_mask(rc, b, m, grp) (os_ref_retain_try_acquire_mask)((rc), (b), (m), NULL)
#define os_ref_release_mask(rc, b, grp) (os_ref_release_mask)((rc), (b), NULL)
#define os_ref_release_relaxed_mask(rc, b, grp) (os_ref_release_relaxed_mask)((rc), (b), NULL)
#define os_ref_release_raw_mask(rc, b, grp) (os_ref_release_raw_mask)((rc), (b), NULL)
/* Fixed: the inline function is os_ref_release_raw_relaxed_mask; the
 * macro previously defined (and called) the nonexistent name
 * os_ref_release_relaxed_raw_mask. */
#define os_ref_release_raw_relaxed_mask(rc, b, grp) (os_ref_release_raw_relaxed_mask)((rc), (b), NULL)
#define os_ref_release_live_raw_mask(rc, b, grp) (os_ref_release_live_raw_mask)((rc), (b), NULL)
#define os_ref_release_live_mask(rc, b, grp) (os_ref_release_live_mask)((rc), (b), NULL)
#endif
425 
426 #pragma GCC visibility pop
427 #endif
428 
429 __END_DECLS
430 
431 #endif /* _OS_REFCNT_INTERNAL_H */
432