1 #define JEMALLOC_CTL_C_
2 #include "jemalloc/internal/jemalloc_internal.h"
4 /******************************************************************************/
8 * ctl_mtx protects the following:
12 static malloc_mutex_t ctl_mtx
;
13 static bool ctl_initialized
;
14 static uint64_t ctl_epoch
;
15 static ctl_stats_t ctl_stats
;
17 /******************************************************************************/
18 /* Helpers for named and indexed nodes. */
20 static inline const ctl_named_node_t
*
21 ctl_named_node(const ctl_node_t
*node
)
24 return ((node
->named
) ? (const ctl_named_node_t
*)node
: NULL
);
27 static inline const ctl_named_node_t
*
28 ctl_named_children(const ctl_named_node_t
*node
, int index
)
30 const ctl_named_node_t
*children
= ctl_named_node(node
->children
);
32 return (children
? &children
[index
] : NULL
);
35 static inline const ctl_indexed_node_t
*
36 ctl_indexed_node(const ctl_node_t
*node
)
39 return ((node
->named
== false) ? (const ctl_indexed_node_t
*)node
:
43 /******************************************************************************/
44 /* Function prototypes for non-inline static functions. */
46 #define CTL_PROTO(n) \
47 static int n##_ctl(const size_t *mib, size_t miblen, void *oldp, \
48 size_t *oldlenp, void *newp, size_t newlen);
50 #define INDEX_PROTO(n) \
51 const ctl_named_node_t *n##_index(const size_t *mib, size_t miblen, \
54 static bool ctl_arena_init(ctl_arena_stats_t
*astats
);
55 static void ctl_arena_clear(ctl_arena_stats_t
*astats
);
56 static void ctl_arena_stats_amerge(ctl_arena_stats_t
*cstats
,
58 static void ctl_arena_stats_smerge(ctl_arena_stats_t
*sstats
,
59 ctl_arena_stats_t
*astats
);
60 static void ctl_arena_refresh(arena_t
*arena
, unsigned i
);
61 static void ctl_refresh(void);
62 static bool ctl_init(void);
63 static int ctl_lookup(const char *name
, ctl_node_t
const **nodesp
,
64 size_t *mibp
, size_t *depthp
);
68 CTL_PROTO(thread_tcache_enabled
)
69 CTL_PROTO(thread_tcache_flush
)
70 CTL_PROTO(thread_arena
)
71 CTL_PROTO(thread_allocated
)
72 CTL_PROTO(thread_allocatedp
)
73 CTL_PROTO(thread_deallocated
)
74 CTL_PROTO(thread_deallocatedp
)
75 CTL_PROTO(config_debug
)
77 CTL_PROTO(config_fill
)
78 CTL_PROTO(config_lazy_lock
)
79 CTL_PROTO(config_mremap
)
80 CTL_PROTO(config_munmap
)
81 CTL_PROTO(config_prof
)
82 CTL_PROTO(config_prof_libgcc
)
83 CTL_PROTO(config_prof_libunwind
)
84 CTL_PROTO(config_stats
)
85 CTL_PROTO(config_tcache
)
87 CTL_PROTO(config_utrace
)
88 CTL_PROTO(config_valgrind
)
89 CTL_PROTO(config_xmalloc
)
91 CTL_PROTO(opt_lg_chunk
)
92 CTL_PROTO(opt_narenas
)
93 CTL_PROTO(opt_lg_dirty_mult
)
94 CTL_PROTO(opt_stats_print
)
97 CTL_PROTO(opt_quarantine
)
98 CTL_PROTO(opt_redzone
)
100 CTL_PROTO(opt_valgrind
)
101 CTL_PROTO(opt_xmalloc
)
102 CTL_PROTO(opt_tcache
)
103 CTL_PROTO(opt_lg_tcache_max
)
105 CTL_PROTO(opt_prof_prefix
)
106 CTL_PROTO(opt_prof_active
)
107 CTL_PROTO(opt_lg_prof_sample
)
108 CTL_PROTO(opt_lg_prof_interval
)
109 CTL_PROTO(opt_prof_gdump
)
110 CTL_PROTO(opt_prof_final
)
111 CTL_PROTO(opt_prof_leak
)
112 CTL_PROTO(opt_prof_accum
)
113 CTL_PROTO(arenas_bin_i_size
)
114 CTL_PROTO(arenas_bin_i_nregs
)
115 CTL_PROTO(arenas_bin_i_run_size
)
116 INDEX_PROTO(arenas_bin_i
)
117 CTL_PROTO(arenas_lrun_i_size
)
118 INDEX_PROTO(arenas_lrun_i
)
119 CTL_PROTO(arenas_narenas
)
120 CTL_PROTO(arenas_initialized
)
121 CTL_PROTO(arenas_quantum
)
122 CTL_PROTO(arenas_page
)
123 CTL_PROTO(arenas_tcache_max
)
124 CTL_PROTO(arenas_nbins
)
125 CTL_PROTO(arenas_nhbins
)
126 CTL_PROTO(arenas_nlruns
)
127 CTL_PROTO(arenas_purge
)
128 CTL_PROTO(prof_active
)
130 CTL_PROTO(prof_interval
)
131 CTL_PROTO(stats_chunks_current
)
132 CTL_PROTO(stats_chunks_total
)
133 CTL_PROTO(stats_chunks_high
)
134 CTL_PROTO(stats_huge_allocated
)
135 CTL_PROTO(stats_huge_nmalloc
)
136 CTL_PROTO(stats_huge_ndalloc
)
137 CTL_PROTO(stats_arenas_i_small_allocated
)
138 CTL_PROTO(stats_arenas_i_small_nmalloc
)
139 CTL_PROTO(stats_arenas_i_small_ndalloc
)
140 CTL_PROTO(stats_arenas_i_small_nrequests
)
141 CTL_PROTO(stats_arenas_i_large_allocated
)
142 CTL_PROTO(stats_arenas_i_large_nmalloc
)
143 CTL_PROTO(stats_arenas_i_large_ndalloc
)
144 CTL_PROTO(stats_arenas_i_large_nrequests
)
145 CTL_PROTO(stats_arenas_i_bins_j_allocated
)
146 CTL_PROTO(stats_arenas_i_bins_j_nmalloc
)
147 CTL_PROTO(stats_arenas_i_bins_j_ndalloc
)
148 CTL_PROTO(stats_arenas_i_bins_j_nrequests
)
149 CTL_PROTO(stats_arenas_i_bins_j_nfills
)
150 CTL_PROTO(stats_arenas_i_bins_j_nflushes
)
151 CTL_PROTO(stats_arenas_i_bins_j_nruns
)
152 CTL_PROTO(stats_arenas_i_bins_j_nreruns
)
153 CTL_PROTO(stats_arenas_i_bins_j_curruns
)
154 INDEX_PROTO(stats_arenas_i_bins_j
)
155 CTL_PROTO(stats_arenas_i_lruns_j_nmalloc
)
156 CTL_PROTO(stats_arenas_i_lruns_j_ndalloc
)
157 CTL_PROTO(stats_arenas_i_lruns_j_nrequests
)
158 CTL_PROTO(stats_arenas_i_lruns_j_curruns
)
159 INDEX_PROTO(stats_arenas_i_lruns_j
)
160 CTL_PROTO(stats_arenas_i_nthreads
)
161 CTL_PROTO(stats_arenas_i_pactive
)
162 CTL_PROTO(stats_arenas_i_pdirty
)
163 CTL_PROTO(stats_arenas_i_mapped
)
164 CTL_PROTO(stats_arenas_i_npurge
)
165 CTL_PROTO(stats_arenas_i_nmadvise
)
166 CTL_PROTO(stats_arenas_i_purged
)
167 INDEX_PROTO(stats_arenas_i
)
168 CTL_PROTO(stats_cactive
)
169 CTL_PROTO(stats_allocated
)
170 CTL_PROTO(stats_active
)
171 CTL_PROTO(stats_mapped
)
173 /******************************************************************************/
176 /* Maximum tree depth. */
177 #define CTL_MAX_DEPTH 6
179 #define NAME(n) {true}, n
180 #define CHILD(t, c) \
181 sizeof(c##_node) / sizeof(ctl_##t##_node_t), \
182 (ctl_node_t *)c##_node, \
184 #define CTL(c) 0, NULL, c##_ctl
187 * Only handles internal indexed nodes, since there are currently no external
190 #define INDEX(i) {false}, i##_index
192 static const ctl_named_node_t tcache_node
[] = {
193 {NAME("enabled"), CTL(thread_tcache_enabled
)},
194 {NAME("flush"), CTL(thread_tcache_flush
)}
197 static const ctl_named_node_t thread_node
[] = {
198 {NAME("arena"), CTL(thread_arena
)},
199 {NAME("allocated"), CTL(thread_allocated
)},
200 {NAME("allocatedp"), CTL(thread_allocatedp
)},
201 {NAME("deallocated"), CTL(thread_deallocated
)},
202 {NAME("deallocatedp"), CTL(thread_deallocatedp
)},
203 {NAME("tcache"), CHILD(named
, tcache
)}
206 static const ctl_named_node_t config_node
[] = {
207 {NAME("debug"), CTL(config_debug
)},
208 {NAME("dss"), CTL(config_dss
)},
209 {NAME("fill"), CTL(config_fill
)},
210 {NAME("lazy_lock"), CTL(config_lazy_lock
)},
211 {NAME("mremap"), CTL(config_mremap
)},
212 {NAME("munmap"), CTL(config_munmap
)},
213 {NAME("prof"), CTL(config_prof
)},
214 {NAME("prof_libgcc"), CTL(config_prof_libgcc
)},
215 {NAME("prof_libunwind"), CTL(config_prof_libunwind
)},
216 {NAME("stats"), CTL(config_stats
)},
217 {NAME("tcache"), CTL(config_tcache
)},
218 {NAME("tls"), CTL(config_tls
)},
219 {NAME("utrace"), CTL(config_utrace
)},
220 {NAME("valgrind"), CTL(config_valgrind
)},
221 {NAME("xmalloc"), CTL(config_xmalloc
)}
224 static const ctl_named_node_t opt_node
[] = {
225 {NAME("abort"), CTL(opt_abort
)},
226 {NAME("lg_chunk"), CTL(opt_lg_chunk
)},
227 {NAME("narenas"), CTL(opt_narenas
)},
228 {NAME("lg_dirty_mult"), CTL(opt_lg_dirty_mult
)},
229 {NAME("stats_print"), CTL(opt_stats_print
)},
230 {NAME("junk"), CTL(opt_junk
)},
231 {NAME("zero"), CTL(opt_zero
)},
232 {NAME("quarantine"), CTL(opt_quarantine
)},
233 {NAME("redzone"), CTL(opt_redzone
)},
234 {NAME("utrace"), CTL(opt_utrace
)},
235 {NAME("valgrind"), CTL(opt_valgrind
)},
236 {NAME("xmalloc"), CTL(opt_xmalloc
)},
237 {NAME("tcache"), CTL(opt_tcache
)},
238 {NAME("lg_tcache_max"), CTL(opt_lg_tcache_max
)},
239 {NAME("prof"), CTL(opt_prof
)},
240 {NAME("prof_prefix"), CTL(opt_prof_prefix
)},
241 {NAME("prof_active"), CTL(opt_prof_active
)},
242 {NAME("lg_prof_sample"), CTL(opt_lg_prof_sample
)},
243 {NAME("lg_prof_interval"), CTL(opt_lg_prof_interval
)},
244 {NAME("prof_gdump"), CTL(opt_prof_gdump
)},
245 {NAME("prof_final"), CTL(opt_prof_final
)},
246 {NAME("prof_leak"), CTL(opt_prof_leak
)},
247 {NAME("prof_accum"), CTL(opt_prof_accum
)}
250 static const ctl_named_node_t arenas_bin_i_node
[] = {
251 {NAME("size"), CTL(arenas_bin_i_size
)},
252 {NAME("nregs"), CTL(arenas_bin_i_nregs
)},
253 {NAME("run_size"), CTL(arenas_bin_i_run_size
)}
255 static const ctl_named_node_t super_arenas_bin_i_node
[] = {
256 {NAME(""), CHILD(named
, arenas_bin_i
)}
259 static const ctl_indexed_node_t arenas_bin_node
[] = {
260 {INDEX(arenas_bin_i
)}
263 static const ctl_named_node_t arenas_lrun_i_node
[] = {
264 {NAME("size"), CTL(arenas_lrun_i_size
)}
266 static const ctl_named_node_t super_arenas_lrun_i_node
[] = {
267 {NAME(""), CHILD(named
, arenas_lrun_i
)}
270 static const ctl_indexed_node_t arenas_lrun_node
[] = {
271 {INDEX(arenas_lrun_i
)}
274 static const ctl_named_node_t arenas_node
[] = {
275 {NAME("narenas"), CTL(arenas_narenas
)},
276 {NAME("initialized"), CTL(arenas_initialized
)},
277 {NAME("quantum"), CTL(arenas_quantum
)},
278 {NAME("page"), CTL(arenas_page
)},
279 {NAME("tcache_max"), CTL(arenas_tcache_max
)},
280 {NAME("nbins"), CTL(arenas_nbins
)},
281 {NAME("nhbins"), CTL(arenas_nhbins
)},
282 {NAME("bin"), CHILD(indexed
, arenas_bin
)},
283 {NAME("nlruns"), CTL(arenas_nlruns
)},
284 {NAME("lrun"), CHILD(indexed
, arenas_lrun
)},
285 {NAME("purge"), CTL(arenas_purge
)}
288 static const ctl_named_node_t prof_node
[] = {
289 {NAME("active"), CTL(prof_active
)},
290 {NAME("dump"), CTL(prof_dump
)},
291 {NAME("interval"), CTL(prof_interval
)}
294 static const ctl_named_node_t stats_chunks_node
[] = {
295 {NAME("current"), CTL(stats_chunks_current
)},
296 {NAME("total"), CTL(stats_chunks_total
)},
297 {NAME("high"), CTL(stats_chunks_high
)}
300 static const ctl_named_node_t stats_huge_node
[] = {
301 {NAME("allocated"), CTL(stats_huge_allocated
)},
302 {NAME("nmalloc"), CTL(stats_huge_nmalloc
)},
303 {NAME("ndalloc"), CTL(stats_huge_ndalloc
)}
306 static const ctl_named_node_t stats_arenas_i_small_node
[] = {
307 {NAME("allocated"), CTL(stats_arenas_i_small_allocated
)},
308 {NAME("nmalloc"), CTL(stats_arenas_i_small_nmalloc
)},
309 {NAME("ndalloc"), CTL(stats_arenas_i_small_ndalloc
)},
310 {NAME("nrequests"), CTL(stats_arenas_i_small_nrequests
)}
313 static const ctl_named_node_t stats_arenas_i_large_node
[] = {
314 {NAME("allocated"), CTL(stats_arenas_i_large_allocated
)},
315 {NAME("nmalloc"), CTL(stats_arenas_i_large_nmalloc
)},
316 {NAME("ndalloc"), CTL(stats_arenas_i_large_ndalloc
)},
317 {NAME("nrequests"), CTL(stats_arenas_i_large_nrequests
)}
320 static const ctl_named_node_t stats_arenas_i_bins_j_node
[] = {
321 {NAME("allocated"), CTL(stats_arenas_i_bins_j_allocated
)},
322 {NAME("nmalloc"), CTL(stats_arenas_i_bins_j_nmalloc
)},
323 {NAME("ndalloc"), CTL(stats_arenas_i_bins_j_ndalloc
)},
324 {NAME("nrequests"), CTL(stats_arenas_i_bins_j_nrequests
)},
325 {NAME("nfills"), CTL(stats_arenas_i_bins_j_nfills
)},
326 {NAME("nflushes"), CTL(stats_arenas_i_bins_j_nflushes
)},
327 {NAME("nruns"), CTL(stats_arenas_i_bins_j_nruns
)},
328 {NAME("nreruns"), CTL(stats_arenas_i_bins_j_nreruns
)},
329 {NAME("curruns"), CTL(stats_arenas_i_bins_j_curruns
)}
331 static const ctl_named_node_t super_stats_arenas_i_bins_j_node
[] = {
332 {NAME(""), CHILD(named
, stats_arenas_i_bins_j
)}
335 static const ctl_indexed_node_t stats_arenas_i_bins_node
[] = {
336 {INDEX(stats_arenas_i_bins_j
)}
339 static const ctl_named_node_t stats_arenas_i_lruns_j_node
[] = {
340 {NAME("nmalloc"), CTL(stats_arenas_i_lruns_j_nmalloc
)},
341 {NAME("ndalloc"), CTL(stats_arenas_i_lruns_j_ndalloc
)},
342 {NAME("nrequests"), CTL(stats_arenas_i_lruns_j_nrequests
)},
343 {NAME("curruns"), CTL(stats_arenas_i_lruns_j_curruns
)}
345 static const ctl_named_node_t super_stats_arenas_i_lruns_j_node
[] = {
346 {NAME(""), CHILD(named
, stats_arenas_i_lruns_j
)}
349 static const ctl_indexed_node_t stats_arenas_i_lruns_node
[] = {
350 {INDEX(stats_arenas_i_lruns_j
)}
353 static const ctl_named_node_t stats_arenas_i_node
[] = {
354 {NAME("nthreads"), CTL(stats_arenas_i_nthreads
)},
355 {NAME("pactive"), CTL(stats_arenas_i_pactive
)},
356 {NAME("pdirty"), CTL(stats_arenas_i_pdirty
)},
357 {NAME("mapped"), CTL(stats_arenas_i_mapped
)},
358 {NAME("npurge"), CTL(stats_arenas_i_npurge
)},
359 {NAME("nmadvise"), CTL(stats_arenas_i_nmadvise
)},
360 {NAME("purged"), CTL(stats_arenas_i_purged
)},
361 {NAME("small"), CHILD(named
, stats_arenas_i_small
)},
362 {NAME("large"), CHILD(named
, stats_arenas_i_large
)},
363 {NAME("bins"), CHILD(indexed
, stats_arenas_i_bins
)},
364 {NAME("lruns"), CHILD(indexed
, stats_arenas_i_lruns
)}
366 static const ctl_named_node_t super_stats_arenas_i_node
[] = {
367 {NAME(""), CHILD(named
, stats_arenas_i
)}
370 static const ctl_indexed_node_t stats_arenas_node
[] = {
371 {INDEX(stats_arenas_i
)}
374 static const ctl_named_node_t stats_node
[] = {
375 {NAME("cactive"), CTL(stats_cactive
)},
376 {NAME("allocated"), CTL(stats_allocated
)},
377 {NAME("active"), CTL(stats_active
)},
378 {NAME("mapped"), CTL(stats_mapped
)},
379 {NAME("chunks"), CHILD(named
, stats_chunks
)},
380 {NAME("huge"), CHILD(named
, stats_huge
)},
381 {NAME("arenas"), CHILD(indexed
, stats_arenas
)}
384 static const ctl_named_node_t root_node
[] = {
385 {NAME("version"), CTL(version
)},
386 {NAME("epoch"), CTL(epoch
)},
387 {NAME("thread"), CHILD(named
, thread
)},
388 {NAME("config"), CHILD(named
, config
)},
389 {NAME("opt"), CHILD(named
, opt
)},
390 {NAME("arenas"), CHILD(named
, arenas
)},
391 {NAME("prof"), CHILD(named
, prof
)},
392 {NAME("stats"), CHILD(named
, stats
)}
394 static const ctl_named_node_t super_root_node
[] = {
395 {NAME(""), CHILD(named
, root
)}
403 /******************************************************************************/
406 ctl_arena_init(ctl_arena_stats_t
*astats
)
409 if (astats
->lstats
== NULL
) {
410 astats
->lstats
= (malloc_large_stats_t
*)base_alloc(nlclasses
*
411 sizeof(malloc_large_stats_t
));
412 if (astats
->lstats
== NULL
)
420 ctl_arena_clear(ctl_arena_stats_t
*astats
)
426 memset(&astats
->astats
, 0, sizeof(arena_stats_t
));
427 astats
->allocated_small
= 0;
428 astats
->nmalloc_small
= 0;
429 astats
->ndalloc_small
= 0;
430 astats
->nrequests_small
= 0;
431 memset(astats
->bstats
, 0, NBINS
* sizeof(malloc_bin_stats_t
));
432 memset(astats
->lstats
, 0, nlclasses
*
433 sizeof(malloc_large_stats_t
));
438 ctl_arena_stats_amerge(ctl_arena_stats_t
*cstats
, arena_t
*arena
)
442 arena_stats_merge(arena
, &cstats
->pactive
, &cstats
->pdirty
,
443 &cstats
->astats
, cstats
->bstats
, cstats
->lstats
);
445 for (i
= 0; i
< NBINS
; i
++) {
446 cstats
->allocated_small
+= cstats
->bstats
[i
].allocated
;
447 cstats
->nmalloc_small
+= cstats
->bstats
[i
].nmalloc
;
448 cstats
->ndalloc_small
+= cstats
->bstats
[i
].ndalloc
;
449 cstats
->nrequests_small
+= cstats
->bstats
[i
].nrequests
;
454 ctl_arena_stats_smerge(ctl_arena_stats_t
*sstats
, ctl_arena_stats_t
*astats
)
458 sstats
->pactive
+= astats
->pactive
;
459 sstats
->pdirty
+= astats
->pdirty
;
461 sstats
->astats
.mapped
+= astats
->astats
.mapped
;
462 sstats
->astats
.npurge
+= astats
->astats
.npurge
;
463 sstats
->astats
.nmadvise
+= astats
->astats
.nmadvise
;
464 sstats
->astats
.purged
+= astats
->astats
.purged
;
466 sstats
->allocated_small
+= astats
->allocated_small
;
467 sstats
->nmalloc_small
+= astats
->nmalloc_small
;
468 sstats
->ndalloc_small
+= astats
->ndalloc_small
;
469 sstats
->nrequests_small
+= astats
->nrequests_small
;
471 sstats
->astats
.allocated_large
+= astats
->astats
.allocated_large
;
472 sstats
->astats
.nmalloc_large
+= astats
->astats
.nmalloc_large
;
473 sstats
->astats
.ndalloc_large
+= astats
->astats
.ndalloc_large
;
474 sstats
->astats
.nrequests_large
+= astats
->astats
.nrequests_large
;
476 for (i
= 0; i
< nlclasses
; i
++) {
477 sstats
->lstats
[i
].nmalloc
+= astats
->lstats
[i
].nmalloc
;
478 sstats
->lstats
[i
].ndalloc
+= astats
->lstats
[i
].ndalloc
;
479 sstats
->lstats
[i
].nrequests
+= astats
->lstats
[i
].nrequests
;
480 sstats
->lstats
[i
].curruns
+= astats
->lstats
[i
].curruns
;
483 for (i
= 0; i
< NBINS
; i
++) {
484 sstats
->bstats
[i
].allocated
+= astats
->bstats
[i
].allocated
;
485 sstats
->bstats
[i
].nmalloc
+= astats
->bstats
[i
].nmalloc
;
486 sstats
->bstats
[i
].ndalloc
+= astats
->bstats
[i
].ndalloc
;
487 sstats
->bstats
[i
].nrequests
+= astats
->bstats
[i
].nrequests
;
489 sstats
->bstats
[i
].nfills
+= astats
->bstats
[i
].nfills
;
490 sstats
->bstats
[i
].nflushes
+=
491 astats
->bstats
[i
].nflushes
;
493 sstats
->bstats
[i
].nruns
+= astats
->bstats
[i
].nruns
;
494 sstats
->bstats
[i
].reruns
+= astats
->bstats
[i
].reruns
;
495 sstats
->bstats
[i
].curruns
+= astats
->bstats
[i
].curruns
;
500 ctl_arena_refresh(arena_t
*arena
, unsigned i
)
502 ctl_arena_stats_t
*astats
= &ctl_stats
.arenas
[i
];
503 ctl_arena_stats_t
*sstats
= &ctl_stats
.arenas
[narenas
];
505 ctl_arena_clear(astats
);
507 sstats
->nthreads
+= astats
->nthreads
;
509 ctl_arena_stats_amerge(astats
, arena
);
510 /* Merge into sum stats as well. */
511 ctl_arena_stats_smerge(sstats
, astats
);
513 astats
->pactive
+= arena
->nactive
;
514 astats
->pdirty
+= arena
->ndirty
;
515 /* Merge into sum stats as well. */
516 sstats
->pactive
+= arena
->nactive
;
517 sstats
->pdirty
+= arena
->ndirty
;
525 VARIABLE_ARRAY(arena_t
*, tarenas
, narenas
);
528 malloc_mutex_lock(&chunks_mtx
);
529 ctl_stats
.chunks
.current
= stats_chunks
.curchunks
;
530 ctl_stats
.chunks
.total
= stats_chunks
.nchunks
;
531 ctl_stats
.chunks
.high
= stats_chunks
.highchunks
;
532 malloc_mutex_unlock(&chunks_mtx
);
534 malloc_mutex_lock(&huge_mtx
);
535 ctl_stats
.huge
.allocated
= huge_allocated
;
536 ctl_stats
.huge
.nmalloc
= huge_nmalloc
;
537 ctl_stats
.huge
.ndalloc
= huge_ndalloc
;
538 malloc_mutex_unlock(&huge_mtx
);
542 * Clear sum stats, since they will be merged into by
543 * ctl_arena_refresh().
545 ctl_stats
.arenas
[narenas
].nthreads
= 0;
546 ctl_arena_clear(&ctl_stats
.arenas
[narenas
]);
548 malloc_mutex_lock(&arenas_lock
);
549 memcpy(tarenas
, arenas
, sizeof(arena_t
*) * narenas
);
550 for (i
= 0; i
< narenas
; i
++) {
551 if (arenas
[i
] != NULL
)
552 ctl_stats
.arenas
[i
].nthreads
= arenas
[i
]->nthreads
;
554 ctl_stats
.arenas
[i
].nthreads
= 0;
556 malloc_mutex_unlock(&arenas_lock
);
557 for (i
= 0; i
< narenas
; i
++) {
558 bool initialized
= (tarenas
[i
] != NULL
);
560 ctl_stats
.arenas
[i
].initialized
= initialized
;
562 ctl_arena_refresh(tarenas
[i
], i
);
566 ctl_stats
.allocated
= ctl_stats
.arenas
[narenas
].allocated_small
567 + ctl_stats
.arenas
[narenas
].astats
.allocated_large
568 + ctl_stats
.huge
.allocated
;
569 ctl_stats
.active
= (ctl_stats
.arenas
[narenas
].pactive
<<
570 LG_PAGE
) + ctl_stats
.huge
.allocated
;
571 ctl_stats
.mapped
= (ctl_stats
.chunks
.current
<< opt_lg_chunk
);
582 malloc_mutex_lock(&ctl_mtx
);
583 if (ctl_initialized
== false) {
585 * Allocate space for one extra arena stats element, which
586 * contains summed stats across all arenas.
588 ctl_stats
.arenas
= (ctl_arena_stats_t
*)base_alloc(
589 (narenas
+ 1) * sizeof(ctl_arena_stats_t
));
590 if (ctl_stats
.arenas
== NULL
) {
594 memset(ctl_stats
.arenas
, 0, (narenas
+ 1) *
595 sizeof(ctl_arena_stats_t
));
598 * Initialize all stats structures, regardless of whether they
599 * ever get used. Lazy initialization would allow errors to
600 * cause inconsistent state to be viewable by the application.
604 for (i
= 0; i
<= narenas
; i
++) {
605 if (ctl_arena_init(&ctl_stats
.arenas
[i
])) {
611 ctl_stats
.arenas
[narenas
].initialized
= true;
615 ctl_initialized
= true;
620 malloc_mutex_unlock(&ctl_mtx
);
625 ctl_lookup(const char *name
, ctl_node_t
const **nodesp
, size_t *mibp
,
629 const char *elm
, *tdot
, *dot
;
631 const ctl_named_node_t
*node
;
634 /* Equivalent to strchrnul(). */
635 dot
= ((tdot
= strchr(elm
, '.')) != NULL
) ? tdot
: strchr(elm
, '\0');
636 elen
= (size_t)((uintptr_t)dot
- (uintptr_t)elm
);
641 node
= super_root_node
;
642 for (i
= 0; i
< *depthp
; i
++) {
644 assert(node
->nchildren
> 0);
645 if (ctl_named_node(node
->children
) != NULL
) {
646 const ctl_named_node_t
*pnode
= node
;
648 /* Children are named. */
649 for (j
= 0; j
< node
->nchildren
; j
++) {
650 const ctl_named_node_t
*child
=
651 ctl_named_children(node
, j
);
652 if (strlen(child
->name
) == elen
&&
653 strncmp(elm
, child
->name
, elen
) == 0) {
657 (const ctl_node_t
*)node
;
668 const ctl_indexed_node_t
*inode
;
670 /* Children are indexed. */
671 index
= malloc_strtoumax(elm
, NULL
, 10);
672 if (index
== UINTMAX_MAX
|| index
> SIZE_T_MAX
) {
677 inode
= ctl_indexed_node(node
->children
);
678 node
= inode
->index(mibp
, *depthp
, (size_t)index
);
685 nodesp
[i
] = (const ctl_node_t
*)node
;
686 mibp
[i
] = (size_t)index
;
689 if (node
->ctl
!= NULL
) {
693 * The name contains more elements than are
694 * in this path through the tree.
699 /* Complete lookup successful. */
706 /* No more elements. */
711 dot
= ((tdot
= strchr(elm
, '.')) != NULL
) ? tdot
:
713 elen
= (size_t)((uintptr_t)dot
- (uintptr_t)elm
);
722 ctl_byname(const char *name
, void *oldp
, size_t *oldlenp
, void *newp
,
727 ctl_node_t
const *nodes
[CTL_MAX_DEPTH
];
728 size_t mib
[CTL_MAX_DEPTH
];
729 const ctl_named_node_t
*node
;
731 if (ctl_initialized
== false && ctl_init()) {
736 depth
= CTL_MAX_DEPTH
;
737 ret
= ctl_lookup(name
, nodes
, mib
, &depth
);
741 node
= ctl_named_node(nodes
[depth
-1]);
742 if (node
!= NULL
&& node
->ctl
)
743 ret
= node
->ctl(mib
, depth
, oldp
, oldlenp
, newp
, newlen
);
745 /* The name refers to a partial path through the ctl tree. */
754 ctl_nametomib(const char *name
, size_t *mibp
, size_t *miblenp
)
758 if (ctl_initialized
== false && ctl_init()) {
763 ret
= ctl_lookup(name
, NULL
, mibp
, miblenp
);
769 ctl_bymib(const size_t *mib
, size_t miblen
, void *oldp
, size_t *oldlenp
,
770 void *newp
, size_t newlen
)
773 const ctl_named_node_t
*node
;
776 if (ctl_initialized
== false && ctl_init()) {
781 /* Iterate down the tree. */
782 node
= super_root_node
;
783 for (i
= 0; i
< miblen
; i
++) {
785 assert(node
->nchildren
> 0);
786 if (ctl_named_node(node
->children
) != NULL
) {
787 /* Children are named. */
788 if (node
->nchildren
<= mib
[i
]) {
792 node
= ctl_named_children(node
, mib
[i
]);
794 const ctl_indexed_node_t
*inode
;
796 /* Indexed element. */
797 inode
= ctl_indexed_node(node
->children
);
798 node
= inode
->index(mib
, miblen
, mib
[i
]);
806 /* Call the ctl function. */
807 if (node
&& node
->ctl
)
808 ret
= node
->ctl(mib
, miblen
, oldp
, oldlenp
, newp
, newlen
);
822 if (malloc_mutex_init(&ctl_mtx
))
825 ctl_initialized
= false;
830 /******************************************************************************/
831 /* *_ctl() functions. */
833 #define READONLY() do { \
834 if (newp != NULL || newlen != 0) { \
840 #define WRITEONLY() do { \
841 if (oldp != NULL || oldlenp != NULL) { \
847 #define READ(v, t) do { \
848 if (oldp != NULL && oldlenp != NULL) { \
849 if (*oldlenp != sizeof(t)) { \
850 size_t copylen = (sizeof(t) <= *oldlenp) \
851 ? sizeof(t) : *oldlenp; \
852 memcpy(oldp, (void *)&v, copylen); \
860 #define WRITE(v, t) do { \
861 if (newp != NULL) { \
862 if (newlen != sizeof(t)) { \
871 * There's a lot of code duplication in the following macros due to limitations
872 * in how nested cpp macros are expanded.
874 #define CTL_RO_CLGEN(c, l, n, v, t) \
876 n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
877 void *newp, size_t newlen) \
885 malloc_mutex_lock(&ctl_mtx); \
893 malloc_mutex_unlock(&ctl_mtx); \
897 #define CTL_RO_CGEN(c, n, v, t) \
899 n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
900 void *newp, size_t newlen) \
907 malloc_mutex_lock(&ctl_mtx); \
914 malloc_mutex_unlock(&ctl_mtx); \
918 #define CTL_RO_GEN(n, v, t) \
920 n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
921 void *newp, size_t newlen) \
926 malloc_mutex_lock(&ctl_mtx); \
933 malloc_mutex_unlock(&ctl_mtx); \
938 * ctl_mtx is not acquired, under the assumption that no pertinent data will
939 * mutate during the call.
941 #define CTL_RO_NL_CGEN(c, n, v, t) \
943 n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
944 void *newp, size_t newlen) \
960 #define CTL_RO_NL_GEN(n, v, t) \
962 n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
963 void *newp, size_t newlen) \
977 #define CTL_RO_BOOL_CONFIG_GEN(n) \
979 n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
980 void *newp, size_t newlen) \
987 READ(oldval, bool); \
994 CTL_RO_NL_GEN(version
, JEMALLOC_VERSION
, const char *)
997 epoch_ctl(const size_t *mib
, size_t miblen
, void *oldp
, size_t *oldlenp
,
998 void *newp
, size_t newlen
)
1003 malloc_mutex_lock(&ctl_mtx
);
1004 WRITE(newval
, uint64_t);
1007 READ(ctl_epoch
, uint64_t);
1011 malloc_mutex_unlock(&ctl_mtx
);
1016 thread_tcache_enabled_ctl(const size_t *mib
, size_t miblen
, void *oldp
,
1017 size_t *oldlenp
, void *newp
, size_t newlen
)
1022 if (config_tcache
== false)
1025 oldval
= tcache_enabled_get();
1027 if (newlen
!= sizeof(bool)) {
1031 tcache_enabled_set(*(bool *)newp
);
1041 thread_tcache_flush_ctl(const size_t *mib
, size_t miblen
, void *oldp
,
1042 size_t *oldlenp
, void *newp
, size_t newlen
)
1046 if (config_tcache
== false)
1060 thread_arena_ctl(const size_t *mib
, size_t miblen
, void *oldp
, size_t *oldlenp
,
1061 void *newp
, size_t newlen
)
1064 unsigned newind
, oldind
;
1066 newind
= oldind
= choose_arena(NULL
)->ind
;
1067 WRITE(newind
, unsigned);
1068 READ(oldind
, unsigned);
1069 if (newind
!= oldind
) {
1072 if (newind
>= narenas
) {
1073 /* New arena index is out of range. */
1078 /* Initialize arena if necessary. */
1079 malloc_mutex_lock(&arenas_lock
);
1080 if ((arena
= arenas
[newind
]) == NULL
&& (arena
=
1081 arenas_extend(newind
)) == NULL
) {
1082 malloc_mutex_unlock(&arenas_lock
);
1086 assert(arena
== arenas
[newind
]);
1087 arenas
[oldind
]->nthreads
--;
1088 arenas
[newind
]->nthreads
++;
1089 malloc_mutex_unlock(&arenas_lock
);
1091 /* Set new arena association. */
1092 if (config_tcache
) {
1094 if ((uintptr_t)(tcache
= *tcache_tsd_get()) >
1095 (uintptr_t)TCACHE_STATE_MAX
) {
1096 tcache_arena_dissociate(tcache
);
1097 tcache_arena_associate(tcache
, arena
);
1100 arenas_tsd_set(&arena
);
1108 CTL_RO_NL_CGEN(config_stats
, thread_allocated
,
1109 thread_allocated_tsd_get()->allocated
, uint64_t)
1110 CTL_RO_NL_CGEN(config_stats
, thread_allocatedp
,
1111 &thread_allocated_tsd_get()->allocated
, uint64_t *)
1112 CTL_RO_NL_CGEN(config_stats
, thread_deallocated
,
1113 thread_allocated_tsd_get()->deallocated
, uint64_t)
1114 CTL_RO_NL_CGEN(config_stats
, thread_deallocatedp
,
1115 &thread_allocated_tsd_get()->deallocated
, uint64_t *)
1117 /******************************************************************************/
1119 CTL_RO_BOOL_CONFIG_GEN(config_debug
)
1120 CTL_RO_BOOL_CONFIG_GEN(config_dss
)
1121 CTL_RO_BOOL_CONFIG_GEN(config_fill
)
1122 CTL_RO_BOOL_CONFIG_GEN(config_lazy_lock
)
1123 CTL_RO_BOOL_CONFIG_GEN(config_mremap
)
1124 CTL_RO_BOOL_CONFIG_GEN(config_munmap
)
1125 CTL_RO_BOOL_CONFIG_GEN(config_prof
)
1126 CTL_RO_BOOL_CONFIG_GEN(config_prof_libgcc
)
1127 CTL_RO_BOOL_CONFIG_GEN(config_prof_libunwind
)
1128 CTL_RO_BOOL_CONFIG_GEN(config_stats
)
1129 CTL_RO_BOOL_CONFIG_GEN(config_tcache
)
1130 CTL_RO_BOOL_CONFIG_GEN(config_tls
)
1131 CTL_RO_BOOL_CONFIG_GEN(config_utrace
)
1132 CTL_RO_BOOL_CONFIG_GEN(config_valgrind
)
1133 CTL_RO_BOOL_CONFIG_GEN(config_xmalloc
)
1135 /******************************************************************************/
1137 CTL_RO_NL_GEN(opt_abort
, opt_abort
, bool)
1138 CTL_RO_NL_GEN(opt_lg_chunk
, opt_lg_chunk
, size_t)
1139 CTL_RO_NL_GEN(opt_narenas
, opt_narenas
, size_t)
1140 CTL_RO_NL_GEN(opt_lg_dirty_mult
, opt_lg_dirty_mult
, ssize_t
)
1141 CTL_RO_NL_GEN(opt_stats_print
, opt_stats_print
, bool)
1142 CTL_RO_NL_CGEN(config_fill
, opt_junk
, opt_junk
, bool)
1143 CTL_RO_NL_CGEN(config_fill
, opt_zero
, opt_zero
, bool)
1144 CTL_RO_NL_CGEN(config_fill
, opt_quarantine
, opt_quarantine
, size_t)
1145 CTL_RO_NL_CGEN(config_fill
, opt_redzone
, opt_redzone
, bool)
1146 CTL_RO_NL_CGEN(config_utrace
, opt_utrace
, opt_utrace
, bool)
1147 CTL_RO_NL_CGEN(config_valgrind
, opt_valgrind
, opt_valgrind
, bool)
1148 CTL_RO_NL_CGEN(config_xmalloc
, opt_xmalloc
, opt_xmalloc
, bool)
1149 CTL_RO_NL_CGEN(config_tcache
, opt_tcache
, opt_tcache
, bool)
1150 CTL_RO_NL_CGEN(config_tcache
, opt_lg_tcache_max
, opt_lg_tcache_max
, ssize_t
)
1151 CTL_RO_NL_CGEN(config_prof
, opt_prof
, opt_prof
, bool)
1152 CTL_RO_NL_CGEN(config_prof
, opt_prof_prefix
, opt_prof_prefix
, const char *)
1153 CTL_RO_CGEN(config_prof
, opt_prof_active
, opt_prof_active
, bool) /* Mutable. */
1154 CTL_RO_NL_CGEN(config_prof
, opt_lg_prof_sample
, opt_lg_prof_sample
, size_t)
1155 CTL_RO_NL_CGEN(config_prof
, opt_lg_prof_interval
, opt_lg_prof_interval
, ssize_t
)
1156 CTL_RO_NL_CGEN(config_prof
, opt_prof_gdump
, opt_prof_gdump
, bool)
1157 CTL_RO_NL_CGEN(config_prof
, opt_prof_final
, opt_prof_final
, bool)
1158 CTL_RO_NL_CGEN(config_prof
, opt_prof_leak
, opt_prof_leak
, bool)
1159 CTL_RO_NL_CGEN(config_prof
, opt_prof_accum
, opt_prof_accum
, bool)
1161 /******************************************************************************/
1163 CTL_RO_NL_GEN(arenas_bin_i_size
, arena_bin_info
[mib
[2]].reg_size
, size_t)
1164 CTL_RO_NL_GEN(arenas_bin_i_nregs
, arena_bin_info
[mib
[2]].nregs
, uint32_t)
1165 CTL_RO_NL_GEN(arenas_bin_i_run_size
, arena_bin_info
[mib
[2]].run_size
, size_t)
1166 const ctl_named_node_t
*
1167 arenas_bin_i_index(const size_t *mib
, size_t miblen
, size_t i
)
1172 return (super_arenas_bin_i_node
);
1175 CTL_RO_NL_GEN(arenas_lrun_i_size
, ((mib
[2]+1) << LG_PAGE
), size_t)
1176 const ctl_named_node_t
*
1177 arenas_lrun_i_index(const size_t *mib
, size_t miblen
, size_t i
)
1182 return (super_arenas_lrun_i_node
);
1185 CTL_RO_NL_GEN(arenas_narenas
, narenas
, unsigned)
1188 arenas_initialized_ctl(const size_t *mib
, size_t miblen
, void *oldp
,
1189 size_t *oldlenp
, void *newp
, size_t newlen
)
1194 malloc_mutex_lock(&ctl_mtx
);
1196 if (*oldlenp
!= narenas
* sizeof(bool)) {
1198 nread
= (*oldlenp
< narenas
* sizeof(bool))
1199 ? (*oldlenp
/ sizeof(bool)) : narenas
;
1205 for (i
= 0; i
< nread
; i
++)
1206 ((bool *)oldp
)[i
] = ctl_stats
.arenas
[i
].initialized
;
1209 malloc_mutex_unlock(&ctl_mtx
);
1213 CTL_RO_NL_GEN(arenas_quantum
, QUANTUM
, size_t)
1214 CTL_RO_NL_GEN(arenas_page
, PAGE
, size_t)
1215 CTL_RO_NL_CGEN(config_tcache
, arenas_tcache_max
, tcache_maxclass
, size_t)
1216 CTL_RO_NL_GEN(arenas_nbins
, NBINS
, unsigned)
1217 CTL_RO_NL_CGEN(config_tcache
, arenas_nhbins
, nhbins
, unsigned)
1218 CTL_RO_NL_GEN(arenas_nlruns
, nlclasses
, size_t)
1221 arenas_purge_ctl(const size_t *mib
, size_t miblen
, void *oldp
, size_t *oldlenp
,
1222 void *newp
, size_t newlen
)
1229 WRITE(arena
, unsigned);
1230 if (newp
!= NULL
&& arena
>= narenas
) {
1234 VARIABLE_ARRAY(arena_t
*, tarenas
, narenas
);
1236 malloc_mutex_lock(&arenas_lock
);
1237 memcpy(tarenas
, arenas
, sizeof(arena_t
*) * narenas
);
1238 malloc_mutex_unlock(&arenas_lock
);
1240 if (arena
== UINT_MAX
) {
1242 for (i
= 0; i
< narenas
; i
++) {
1243 if (tarenas
[i
] != NULL
)
1244 arena_purge_all(tarenas
[i
]);
1247 assert(arena
< narenas
);
1248 if (tarenas
[arena
] != NULL
)
1249 arena_purge_all(tarenas
[arena
]);
1258 /******************************************************************************/
/*
 * "prof.active" handler: reads and/or writes the global
 * opt_prof_active flag under ctl_mtx.  Fails when profiling support
 * (config_prof) is compiled out.
 *
 * NOTE(review): extraction dropped interior lines (locals, the ENOENT
 * return for !config_prof, the READ() of oldval, memory-barrier calls
 * referenced by the surviving comment, return path).
 */
1261 prof_active_ctl(const size_t *mib
, size_t miblen
, void *oldp
, size_t *oldlenp
,
1262 void *newp
, size_t newlen
)
/* Bail out when profiling is not compiled in (body dropped). */
1267 if (config_prof
== false)
1270 malloc_mutex_lock(&ctl_mtx
); /* Protect opt_prof_active. */
/* Capture the previous value to report back to the reader. */
1271 oldval
= opt_prof_active
;
1274 * The memory barriers will tend to make opt_prof_active
1275 * propagate faster on systems with weak memory ordering.
/* Store the caller's new value, if one was supplied. */
1278 WRITE(opt_prof_active
, bool);
1285 malloc_mutex_unlock(&ctl_mtx
);
/*
 * "prof.dump" handler: triggers a manual profile dump via
 * prof_mdump().  The writer may supply a filename string; a NULL
 * filename lets prof_mdump() choose a default name.  Fails when
 * config_prof is compiled out.
 *
 * NOTE(review): extraction dropped interior lines (ret local, ENOENT
 * path, EFAULT body of the prof_mdump failure branch, return path).
 */
1290 prof_dump_ctl(const size_t *mib
, size_t miblen
, void *oldp
, size_t *oldlenp
,
1291 void *newp
, size_t newlen
)
/* Default to an unnamed (implementation-chosen) dump file. */
1294 const char *filename
= NULL
;
/* Bail out when profiling is not compiled in (body dropped). */
1296 if (config_prof
== false)
/* Optionally read an explicit dump filename from the caller. */
1300 WRITE(filename
, const char *);
/* Nonzero return from prof_mdump() indicates dump failure
 * (error body dropped). */
1302 if (prof_mdump(filename
)) {
/*
 * "prof.interval": read-only, no-lock, gated on config_prof; exposes
 * the average allocation interval between automatic profile dumps.
 */
1312 CTL_RO_NL_CGEN(config_prof
, prof_interval
, prof_interval
, uint64_t)
1314 /******************************************************************************/
/*
 * Read-only generators (gated on config_stats) for chunk- and
 * huge-allocation counters out of the ctl_stats snapshot / globals.
 * NOTE(review): the trailing "size_t)" line of the first invocation
 * (original line 1317) was dropped by the extraction.
 */
/* "stats.chunks.current": chunks currently mapped. */
1316 CTL_RO_CGEN(config_stats
, stats_chunks_current
, ctl_stats
.chunks
.current
,
/* "stats.chunks.total": cumulative chunks allocated. */
1318 CTL_RO_CGEN(config_stats
, stats_chunks_total
, ctl_stats
.chunks
.total
, uint64_t)
/* "stats.chunks.high": high-water mark of mapped chunks. */
1319 CTL_RO_CGEN(config_stats
, stats_chunks_high
, ctl_stats
.chunks
.high
, size_t)
/* "stats.huge.allocated": bytes in live huge allocations. */
1320 CTL_RO_CGEN(config_stats
, stats_huge_allocated
, huge_allocated
, size_t)
/* "stats.huge.nmalloc" / "stats.huge.ndalloc": cumulative huge
 * allocation / deallocation counts. */
1321 CTL_RO_CGEN(config_stats
, stats_huge_nmalloc
, huge_nmalloc
, uint64_t)
1322 CTL_RO_CGEN(config_stats
, stats_huge_ndalloc
, huge_ndalloc
, uint64_t)
/*
 * Per-arena small/large allocation counters, indexed by the arena
 * number carried in mib[2] ("stats.arenas.<i>.{small,large}.*").
 * All read from the ctl_stats snapshot; gated on config_stats.
 */
/* Small size-class aggregates. */
1323 CTL_RO_CGEN(config_stats
, stats_arenas_i_small_allocated
,
1324 ctl_stats
.arenas
[mib
[2]].allocated_small
, size_t)
1325 CTL_RO_CGEN(config_stats
, stats_arenas_i_small_nmalloc
,
1326 ctl_stats
.arenas
[mib
[2]].nmalloc_small
, uint64_t)
1327 CTL_RO_CGEN(config_stats
, stats_arenas_i_small_ndalloc
,
1328 ctl_stats
.arenas
[mib
[2]].ndalloc_small
, uint64_t)
1329 CTL_RO_CGEN(config_stats
, stats_arenas_i_small_nrequests
,
1330 ctl_stats
.arenas
[mib
[2]].nrequests_small
, uint64_t)
/* Large size-class aggregates (stored inside the astats member). */
1331 CTL_RO_CGEN(config_stats
, stats_arenas_i_large_allocated
,
1332 ctl_stats
.arenas
[mib
[2]].astats
.allocated_large
, size_t)
1333 CTL_RO_CGEN(config_stats
, stats_arenas_i_large_nmalloc
,
1334 ctl_stats
.arenas
[mib
[2]].astats
.nmalloc_large
, uint64_t)
1335 CTL_RO_CGEN(config_stats
, stats_arenas_i_large_ndalloc
,
1336 ctl_stats
.arenas
[mib
[2]].astats
.ndalloc_large
, uint64_t)
1337 CTL_RO_CGEN(config_stats
, stats_arenas_i_large_nrequests
,
1338 ctl_stats
.arenas
[mib
[2]].astats
.nrequests_large
, uint64_t)
/*
 * Per-arena, per-bin statistics ("stats.arenas.<i>.bins.<j>.*"):
 * arena index in mib[2], bin index in mib[4], values from the
 * bstats array of the ctl_stats snapshot.  The nfills/nflushes
 * counters additionally require config_tcache.
 */
1340 CTL_RO_CGEN(config_stats
, stats_arenas_i_bins_j_allocated
,
1341 ctl_stats
.arenas
[mib
[2]].bstats
[mib
[4]].allocated
, size_t)
1342 CTL_RO_CGEN(config_stats
, stats_arenas_i_bins_j_nmalloc
,
1343 ctl_stats
.arenas
[mib
[2]].bstats
[mib
[4]].nmalloc
, uint64_t)
1344 CTL_RO_CGEN(config_stats
, stats_arenas_i_bins_j_ndalloc
,
1345 ctl_stats
.arenas
[mib
[2]].bstats
[mib
[4]].ndalloc
, uint64_t)
1346 CTL_RO_CGEN(config_stats
, stats_arenas_i_bins_j_nrequests
,
1347 ctl_stats
.arenas
[mib
[2]].bstats
[mib
[4]].nrequests
, uint64_t)
/* tcache fill/flush counters: only meaningful with tcache support. */
1348 CTL_RO_CGEN(config_stats
&& config_tcache
, stats_arenas_i_bins_j_nfills
,
1349 ctl_stats
.arenas
[mib
[2]].bstats
[mib
[4]].nfills
, uint64_t)
1350 CTL_RO_CGEN(config_stats
&& config_tcache
, stats_arenas_i_bins_j_nflushes
,
1351 ctl_stats
.arenas
[mib
[2]].bstats
[mib
[4]].nflushes
, uint64_t)
/* Run-level churn for the bin: runs created, reused, and current. */
1352 CTL_RO_CGEN(config_stats
, stats_arenas_i_bins_j_nruns
,
1353 ctl_stats
.arenas
[mib
[2]].bstats
[mib
[4]].nruns
, uint64_t)
1354 CTL_RO_CGEN(config_stats
, stats_arenas_i_bins_j_nreruns
,
1355 ctl_stats
.arenas
[mib
[2]].bstats
[mib
[4]].reruns
, uint64_t)
1356 CTL_RO_CGEN(config_stats
, stats_arenas_i_bins_j_curruns
,
1357 ctl_stats
.arenas
[mib
[2]].bstats
[mib
[4]].curruns
, size_t)
/*
 * Index handler for the "stats.arenas.<i>.bins.<j>" subtree: returns
 * the shared node template for any valid bin index j.
 * NOTE(review): the bounds check on j (presumably j > NBINS -> NULL)
 * was dropped by the extraction.
 */
1359 const ctl_named_node_t
*
1360 stats_arenas_i_bins_j_index(const size_t *mib
, size_t miblen
, size_t j
)
1365 return (super_stats_arenas_i_bins_j_node
);
/*
 * Per-arena, per-large-size-class statistics
 * ("stats.arenas.<i>.lruns.<j>.*"): arena index in mib[2], large run
 * class index in mib[4], values from the lstats array of the
 * ctl_stats snapshot.  Gated on config_stats.
 */
1368 CTL_RO_CGEN(config_stats
, stats_arenas_i_lruns_j_nmalloc
,
1369 ctl_stats
.arenas
[mib
[2]].lstats
[mib
[4]].nmalloc
, uint64_t)
1370 CTL_RO_CGEN(config_stats
, stats_arenas_i_lruns_j_ndalloc
,
1371 ctl_stats
.arenas
[mib
[2]].lstats
[mib
[4]].ndalloc
, uint64_t)
1372 CTL_RO_CGEN(config_stats
, stats_arenas_i_lruns_j_nrequests
,
1373 ctl_stats
.arenas
[mib
[2]].lstats
[mib
[4]].nrequests
, uint64_t)
1374 CTL_RO_CGEN(config_stats
, stats_arenas_i_lruns_j_curruns
,
1375 ctl_stats
.arenas
[mib
[2]].lstats
[mib
[4]].curruns
, size_t)
/*
 * Index handler for the "stats.arenas.<i>.lruns.<j>" subtree: returns
 * the shared node template for any valid large-class index j.
 * NOTE(review): the bounds check on j (presumably j > nlclasses ->
 * NULL) was dropped by the extraction.
 */
1377 const ctl_named_node_t
*
1378 stats_arenas_i_lruns_j_index(const size_t *mib
, size_t miblen
, size_t j
)
1383 return (super_stats_arenas_i_lruns_j_node
);
/*
 * Per-arena summary statistics ("stats.arenas.<i>.*"), arena index in
 * mib[2].  The first three (nthreads/pactive/pdirty) use the
 * unconditional CTL_RO_GEN — available even without config_stats —
 * while the astats-derived counters require config_stats.
 */
/* Threads currently assigned to the arena. */
1386 CTL_RO_GEN(stats_arenas_i_nthreads
, ctl_stats
.arenas
[mib
[2]].nthreads
, unsigned)
/* Active and dirty page counts. */
1387 CTL_RO_GEN(stats_arenas_i_pactive
, ctl_stats
.arenas
[mib
[2]].pactive
, size_t)
1388 CTL_RO_GEN(stats_arenas_i_pdirty
, ctl_stats
.arenas
[mib
[2]].pdirty
, size_t)
/* Bytes mapped by the arena. */
1389 CTL_RO_CGEN(config_stats
, stats_arenas_i_mapped
,
1390 ctl_stats
.arenas
[mib
[2]].astats
.mapped
, size_t)
/* Purge activity: purge sweeps, madvise calls, pages purged. */
1391 CTL_RO_CGEN(config_stats
, stats_arenas_i_npurge
,
1392 ctl_stats
.arenas
[mib
[2]].astats
.npurge
, uint64_t)
1393 CTL_RO_CGEN(config_stats
, stats_arenas_i_nmadvise
,
1394 ctl_stats
.arenas
[mib
[2]].astats
.nmadvise
, uint64_t)
1395 CTL_RO_CGEN(config_stats
, stats_arenas_i_purged
,
1396 ctl_stats
.arenas
[mib
[2]].astats
.purged
, uint64_t)
/*
 * Index handler for the "stats.arenas.<i>" subtree: under ctl_mtx,
 * returns the shared node template when arena i is initialized in the
 * current ctl_stats snapshot, NULL otherwise.
 * NOTE(review): extraction dropped the uninitialized-arena branch body
 * (presumably ret = NULL plus a goto to the unlock label) and the
 * final return statement.
 */
1398 const ctl_named_node_t
*
1399 stats_arenas_i_index(const size_t *mib
, size_t miblen
, size_t i
)
1401 const ctl_named_node_t
* ret
;
/* ctl_mtx protects the ctl_stats snapshot consulted below. */
1403 malloc_mutex_lock(&ctl_mtx
);
/* Uninitialized arena: lookup fails (branch body dropped). */
1404 if (ctl_stats
.arenas
[i
].initialized
== false) {
1409 ret
= super_stats_arenas_i_node
;
1411 malloc_mutex_unlock(&ctl_mtx
);
/*
 * Global allocator totals ("stats.*"), gated on config_stats.
 * "stats.cactive" deliberately returns a POINTER to the live
 * stats_cactive counter (type size_t *), so callers can poll it
 * without further mallctl calls; the rest are plain snapshot values.
 */
1415 CTL_RO_CGEN(config_stats
, stats_cactive
, &stats_cactive
, size_t *)
/* Total bytes in live application allocations. */
1416 CTL_RO_CGEN(config_stats
, stats_allocated
, ctl_stats
.allocated
, size_t)
/* Total bytes in active pages. */
1417 CTL_RO_CGEN(config_stats
, stats_active
, ctl_stats
.active
, size_t)
/* Total bytes mapped by the allocator. */
1418 CTL_RO_CGEN(config_stats
, stats_mapped
, ctl_stats
.mapped
, size_t)