#define	JEMALLOC_CTL_C_
#include "jemalloc/internal/jemalloc_internal.h"

/******************************************************************************/
/* Data. */

/*
 * ctl_mtx protects the following:
 * - ctl_stats.*
 */
static malloc_mutex_t	ctl_mtx;
static bool		ctl_initialized;
static uint64_t		ctl_epoch;
static ctl_stats_t	ctl_stats;
/******************************************************************************/
/* Helpers for named and indexed nodes. */

static inline const ctl_named_node_t *
ctl_named_node(const ctl_node_t *node)
{

	return ((node->named) ? (const ctl_named_node_t *)node : NULL);
}

static inline const ctl_named_node_t *
ctl_named_children(const ctl_named_node_t *node, int index)
{
	const ctl_named_node_t *children = ctl_named_node(node->children);

	return (children ? &children[index] : NULL);
}

static inline const ctl_indexed_node_t *
ctl_indexed_node(const ctl_node_t *node)
{

	return ((node->named == false) ? (const ctl_indexed_node_t *)node :
	    NULL);
}
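/*
 * Editorial note (not in the original source): every tree node begins with a
 * ctl_node_t whose lone "named" flag discriminates the two node kinds, so the
 * helpers above are effectively checked downcasts; each returns NULL when
 * asked to treat a node as the wrong kind.
 */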
/******************************************************************************/
/* Function prototypes for non-inline static functions. */

#define	CTL_PROTO(n)							\
static int	n##_ctl(const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen);

#define	INDEX_PROTO(n)							\
static const ctl_named_node_t	*n##_index(const size_t *mib,		\
    size_t miblen, size_t i);
static bool	ctl_arena_init(ctl_arena_stats_t *astats);
static void	ctl_arena_clear(ctl_arena_stats_t *astats);
static void	ctl_arena_stats_amerge(ctl_arena_stats_t *cstats,
    arena_t *arena);
static void	ctl_arena_stats_smerge(ctl_arena_stats_t *sstats,
    ctl_arena_stats_t *astats);
static void	ctl_arena_refresh(arena_t *arena, unsigned i);
static bool	ctl_grow(void);
static void	ctl_refresh(void);
static bool	ctl_init(void);
static int	ctl_lookup(const char *name, ctl_node_t const **nodesp,
    size_t *mibp, size_t *depthp);
CTL_PROTO(version)
CTL_PROTO(epoch)
CTL_PROTO(thread_tcache_enabled)
CTL_PROTO(thread_tcache_flush)
CTL_PROTO(thread_arena)
CTL_PROTO(thread_allocated)
CTL_PROTO(thread_allocatedp)
CTL_PROTO(thread_deallocated)
CTL_PROTO(thread_deallocatedp)
CTL_PROTO(config_debug)
CTL_PROTO(config_dss)
CTL_PROTO(config_fill)
CTL_PROTO(config_lazy_lock)
CTL_PROTO(config_mremap)
CTL_PROTO(config_munmap)
CTL_PROTO(config_prof)
CTL_PROTO(config_prof_libgcc)
CTL_PROTO(config_prof_libunwind)
CTL_PROTO(config_stats)
CTL_PROTO(config_tcache)
CTL_PROTO(config_tls)
CTL_PROTO(config_utrace)
CTL_PROTO(config_valgrind)
CTL_PROTO(config_xmalloc)
CTL_PROTO(opt_abort)
CTL_PROTO(opt_dss)
CTL_PROTO(opt_lg_chunk)
CTL_PROTO(opt_narenas)
CTL_PROTO(opt_lg_dirty_mult)
CTL_PROTO(opt_stats_print)
CTL_PROTO(opt_junk)
CTL_PROTO(opt_zero)
CTL_PROTO(opt_quarantine)
CTL_PROTO(opt_redzone)
CTL_PROTO(opt_utrace)
CTL_PROTO(opt_valgrind)
CTL_PROTO(opt_xmalloc)
CTL_PROTO(opt_tcache)
CTL_PROTO(opt_lg_tcache_max)
CTL_PROTO(opt_prof)
CTL_PROTO(opt_prof_prefix)
CTL_PROTO(opt_prof_active)
CTL_PROTO(opt_lg_prof_sample)
CTL_PROTO(opt_lg_prof_interval)
CTL_PROTO(opt_prof_gdump)
CTL_PROTO(opt_prof_final)
CTL_PROTO(opt_prof_leak)
CTL_PROTO(opt_prof_accum)
CTL_PROTO(arena_i_purge)
static void	arena_purge(unsigned arena_ind);
CTL_PROTO(arena_i_dss)
INDEX_PROTO(arena_i)
CTL_PROTO(arenas_bin_i_size)
CTL_PROTO(arenas_bin_i_nregs)
CTL_PROTO(arenas_bin_i_run_size)
INDEX_PROTO(arenas_bin_i)
CTL_PROTO(arenas_lrun_i_size)
INDEX_PROTO(arenas_lrun_i)
CTL_PROTO(arenas_narenas)
CTL_PROTO(arenas_initialized)
CTL_PROTO(arenas_quantum)
CTL_PROTO(arenas_page)
CTL_PROTO(arenas_tcache_max)
CTL_PROTO(arenas_nbins)
CTL_PROTO(arenas_nhbins)
CTL_PROTO(arenas_nlruns)
CTL_PROTO(arenas_purge)
CTL_PROTO(arenas_extend)
CTL_PROTO(prof_active)
CTL_PROTO(prof_dump)
CTL_PROTO(prof_interval)
CTL_PROTO(stats_chunks_current)
CTL_PROTO(stats_chunks_total)
CTL_PROTO(stats_chunks_high)
CTL_PROTO(stats_huge_allocated)
CTL_PROTO(stats_huge_nmalloc)
CTL_PROTO(stats_huge_ndalloc)
CTL_PROTO(stats_arenas_i_small_allocated)
CTL_PROTO(stats_arenas_i_small_nmalloc)
CTL_PROTO(stats_arenas_i_small_ndalloc)
CTL_PROTO(stats_arenas_i_small_nrequests)
CTL_PROTO(stats_arenas_i_large_allocated)
CTL_PROTO(stats_arenas_i_large_nmalloc)
CTL_PROTO(stats_arenas_i_large_ndalloc)
CTL_PROTO(stats_arenas_i_large_nrequests)
CTL_PROTO(stats_arenas_i_bins_j_allocated)
CTL_PROTO(stats_arenas_i_bins_j_nmalloc)
CTL_PROTO(stats_arenas_i_bins_j_ndalloc)
CTL_PROTO(stats_arenas_i_bins_j_nrequests)
CTL_PROTO(stats_arenas_i_bins_j_nfills)
CTL_PROTO(stats_arenas_i_bins_j_nflushes)
CTL_PROTO(stats_arenas_i_bins_j_nruns)
CTL_PROTO(stats_arenas_i_bins_j_nreruns)
CTL_PROTO(stats_arenas_i_bins_j_curruns)
INDEX_PROTO(stats_arenas_i_bins_j)
CTL_PROTO(stats_arenas_i_lruns_j_nmalloc)
CTL_PROTO(stats_arenas_i_lruns_j_ndalloc)
CTL_PROTO(stats_arenas_i_lruns_j_nrequests)
CTL_PROTO(stats_arenas_i_lruns_j_curruns)
INDEX_PROTO(stats_arenas_i_lruns_j)
CTL_PROTO(stats_arenas_i_nthreads)
CTL_PROTO(stats_arenas_i_dss)
CTL_PROTO(stats_arenas_i_pactive)
CTL_PROTO(stats_arenas_i_pdirty)
CTL_PROTO(stats_arenas_i_mapped)
CTL_PROTO(stats_arenas_i_npurge)
CTL_PROTO(stats_arenas_i_nmadvise)
CTL_PROTO(stats_arenas_i_purged)
INDEX_PROTO(stats_arenas_i)
CTL_PROTO(stats_cactive)
CTL_PROTO(stats_allocated)
CTL_PROTO(stats_active)
CTL_PROTO(stats_mapped)
/******************************************************************************/
/* mallctl tree. */

/* Maximum tree depth. */
#define	CTL_MAX_DEPTH	6

#define	NAME(n)	{true},	n
#define	CHILD(t, c)							\
	sizeof(c##_node) / sizeof(ctl_##t##_node_t),			\
	(ctl_node_t *)c##_node,						\
	NULL
#define	CTL(c)	0, NULL, c##_ctl

/*
 * Only handles internal indexed nodes, since there are currently no external
 * ones.
 */
#define	INDEX(i)	{false},	i##_index
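/*
 * Illustrative expansion (editorial, not in the original source): given the
 * macros above, a leaf entry such as
 *
 *	{NAME("flush"),	CTL(thread_tcache_flush)}
 *
 * expands to {{true}, "flush", 0, NULL, thread_tcache_flush_ctl} -- a named
 * node with no children -- whereas CHILD(named, tcache) fills in the child
 * count and child array and leaves the ctl function NULL.
 */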
static const ctl_named_node_t	tcache_node[] = {
	{NAME("enabled"),	CTL(thread_tcache_enabled)},
	{NAME("flush"),		CTL(thread_tcache_flush)}
};

static const ctl_named_node_t	thread_node[] = {
	{NAME("arena"),		CTL(thread_arena)},
	{NAME("allocated"),	CTL(thread_allocated)},
	{NAME("allocatedp"),	CTL(thread_allocatedp)},
	{NAME("deallocated"),	CTL(thread_deallocated)},
	{NAME("deallocatedp"),	CTL(thread_deallocatedp)},
	{NAME("tcache"),	CHILD(named, tcache)}
};

static const ctl_named_node_t	config_node[] = {
	{NAME("debug"),			CTL(config_debug)},
	{NAME("dss"),			CTL(config_dss)},
	{NAME("fill"),			CTL(config_fill)},
	{NAME("lazy_lock"),		CTL(config_lazy_lock)},
	{NAME("mremap"),		CTL(config_mremap)},
	{NAME("munmap"),		CTL(config_munmap)},
	{NAME("prof"),			CTL(config_prof)},
	{NAME("prof_libgcc"),		CTL(config_prof_libgcc)},
	{NAME("prof_libunwind"),	CTL(config_prof_libunwind)},
	{NAME("stats"),			CTL(config_stats)},
	{NAME("tcache"),		CTL(config_tcache)},
	{NAME("tls"),			CTL(config_tls)},
	{NAME("utrace"),		CTL(config_utrace)},
	{NAME("valgrind"),		CTL(config_valgrind)},
	{NAME("xmalloc"),		CTL(config_xmalloc)}
};

static const ctl_named_node_t	opt_node[] = {
	{NAME("abort"),			CTL(opt_abort)},
	{NAME("dss"),			CTL(opt_dss)},
	{NAME("lg_chunk"),		CTL(opt_lg_chunk)},
	{NAME("narenas"),		CTL(opt_narenas)},
	{NAME("lg_dirty_mult"),		CTL(opt_lg_dirty_mult)},
	{NAME("stats_print"),		CTL(opt_stats_print)},
	{NAME("junk"),			CTL(opt_junk)},
	{NAME("zero"),			CTL(opt_zero)},
	{NAME("quarantine"),		CTL(opt_quarantine)},
	{NAME("redzone"),		CTL(opt_redzone)},
	{NAME("utrace"),		CTL(opt_utrace)},
	{NAME("valgrind"),		CTL(opt_valgrind)},
	{NAME("xmalloc"),		CTL(opt_xmalloc)},
	{NAME("tcache"),		CTL(opt_tcache)},
	{NAME("lg_tcache_max"),		CTL(opt_lg_tcache_max)},
	{NAME("prof"),			CTL(opt_prof)},
	{NAME("prof_prefix"),		CTL(opt_prof_prefix)},
	{NAME("prof_active"),		CTL(opt_prof_active)},
	{NAME("lg_prof_sample"),	CTL(opt_lg_prof_sample)},
	{NAME("lg_prof_interval"),	CTL(opt_lg_prof_interval)},
	{NAME("prof_gdump"),		CTL(opt_prof_gdump)},
	{NAME("prof_final"),		CTL(opt_prof_final)},
	{NAME("prof_leak"),		CTL(opt_prof_leak)},
	{NAME("prof_accum"),		CTL(opt_prof_accum)}
};

static const ctl_named_node_t	arena_i_node[] = {
	{NAME("purge"),		CTL(arena_i_purge)},
	{NAME("dss"),		CTL(arena_i_dss)}
};
static const ctl_named_node_t	super_arena_i_node[] = {
	{NAME(""),		CHILD(named, arena_i)}
};

static const ctl_indexed_node_t	arena_node[] = {
	{INDEX(arena_i)}
};

static const ctl_named_node_t	arenas_bin_i_node[] = {
	{NAME("size"),		CTL(arenas_bin_i_size)},
	{NAME("nregs"),		CTL(arenas_bin_i_nregs)},
	{NAME("run_size"),	CTL(arenas_bin_i_run_size)}
};
static const ctl_named_node_t	super_arenas_bin_i_node[] = {
	{NAME(""),		CHILD(named, arenas_bin_i)}
};

static const ctl_indexed_node_t	arenas_bin_node[] = {
	{INDEX(arenas_bin_i)}
};

static const ctl_named_node_t	arenas_lrun_i_node[] = {
	{NAME("size"),		CTL(arenas_lrun_i_size)}
};
static const ctl_named_node_t	super_arenas_lrun_i_node[] = {
	{NAME(""),		CHILD(named, arenas_lrun_i)}
};

static const ctl_indexed_node_t	arenas_lrun_node[] = {
	{INDEX(arenas_lrun_i)}
};

static const ctl_named_node_t	arenas_node[] = {
	{NAME("narenas"),	CTL(arenas_narenas)},
	{NAME("initialized"),	CTL(arenas_initialized)},
	{NAME("quantum"),	CTL(arenas_quantum)},
	{NAME("page"),		CTL(arenas_page)},
	{NAME("tcache_max"),	CTL(arenas_tcache_max)},
	{NAME("nbins"),		CTL(arenas_nbins)},
	{NAME("nhbins"),	CTL(arenas_nhbins)},
	{NAME("bin"),		CHILD(indexed, arenas_bin)},
	{NAME("nlruns"),	CTL(arenas_nlruns)},
	{NAME("lrun"),		CHILD(indexed, arenas_lrun)},
	{NAME("purge"),		CTL(arenas_purge)},
	{NAME("extend"),	CTL(arenas_extend)}
};

static const ctl_named_node_t	prof_node[] = {
	{NAME("active"),	CTL(prof_active)},
	{NAME("dump"),		CTL(prof_dump)},
	{NAME("interval"),	CTL(prof_interval)}
};

static const ctl_named_node_t	stats_chunks_node[] = {
	{NAME("current"),	CTL(stats_chunks_current)},
	{NAME("total"),		CTL(stats_chunks_total)},
	{NAME("high"),		CTL(stats_chunks_high)}
};

static const ctl_named_node_t	stats_huge_node[] = {
	{NAME("allocated"),	CTL(stats_huge_allocated)},
	{NAME("nmalloc"),	CTL(stats_huge_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_huge_ndalloc)}
};

static const ctl_named_node_t	stats_arenas_i_small_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_small_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_small_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_small_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_small_nrequests)}
};

static const ctl_named_node_t	stats_arenas_i_large_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_large_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_large_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_large_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_large_nrequests)}
};

static const ctl_named_node_t	stats_arenas_i_bins_j_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_bins_j_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_bins_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_bins_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_bins_j_nrequests)},
	{NAME("nfills"),	CTL(stats_arenas_i_bins_j_nfills)},
	{NAME("nflushes"),	CTL(stats_arenas_i_bins_j_nflushes)},
	{NAME("nruns"),		CTL(stats_arenas_i_bins_j_nruns)},
	{NAME("nreruns"),	CTL(stats_arenas_i_bins_j_nreruns)},
	{NAME("curruns"),	CTL(stats_arenas_i_bins_j_curruns)}
};
static const ctl_named_node_t	super_stats_arenas_i_bins_j_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i_bins_j)}
};

static const ctl_indexed_node_t	stats_arenas_i_bins_node[] = {
	{INDEX(stats_arenas_i_bins_j)}
};

static const ctl_named_node_t	stats_arenas_i_lruns_j_node[] = {
	{NAME("nmalloc"),	CTL(stats_arenas_i_lruns_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_lruns_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_lruns_j_nrequests)},
	{NAME("curruns"),	CTL(stats_arenas_i_lruns_j_curruns)}
};
static const ctl_named_node_t	super_stats_arenas_i_lruns_j_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i_lruns_j)}
};

static const ctl_indexed_node_t	stats_arenas_i_lruns_node[] = {
	{INDEX(stats_arenas_i_lruns_j)}
};

static const ctl_named_node_t	stats_arenas_i_node[] = {
	{NAME("nthreads"),	CTL(stats_arenas_i_nthreads)},
	{NAME("dss"),		CTL(stats_arenas_i_dss)},
	{NAME("pactive"),	CTL(stats_arenas_i_pactive)},
	{NAME("pdirty"),	CTL(stats_arenas_i_pdirty)},
	{NAME("mapped"),	CTL(stats_arenas_i_mapped)},
	{NAME("npurge"),	CTL(stats_arenas_i_npurge)},
	{NAME("nmadvise"),	CTL(stats_arenas_i_nmadvise)},
	{NAME("purged"),	CTL(stats_arenas_i_purged)},
	{NAME("small"),		CHILD(named, stats_arenas_i_small)},
	{NAME("large"),		CHILD(named, stats_arenas_i_large)},
	{NAME("bins"),		CHILD(indexed, stats_arenas_i_bins)},
	{NAME("lruns"),		CHILD(indexed, stats_arenas_i_lruns)}
};
static const ctl_named_node_t	super_stats_arenas_i_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i)}
};

static const ctl_indexed_node_t	stats_arenas_node[] = {
	{INDEX(stats_arenas_i)}
};

static const ctl_named_node_t	stats_node[] = {
	{NAME("cactive"),	CTL(stats_cactive)},
	{NAME("allocated"),	CTL(stats_allocated)},
	{NAME("active"),	CTL(stats_active)},
	{NAME("mapped"),	CTL(stats_mapped)},
	{NAME("chunks"),	CHILD(named, stats_chunks)},
	{NAME("huge"),		CHILD(named, stats_huge)},
	{NAME("arenas"),	CHILD(indexed, stats_arenas)}
};

static const ctl_named_node_t	root_node[] = {
	{NAME("version"),	CTL(version)},
	{NAME("epoch"),		CTL(epoch)},
	{NAME("thread"),	CHILD(named, thread)},
	{NAME("config"),	CHILD(named, config)},
	{NAME("opt"),		CHILD(named, opt)},
	{NAME("arena"),		CHILD(indexed, arena)},
	{NAME("arenas"),	CHILD(named, arenas)},
	{NAME("prof"),		CHILD(named, prof)},
	{NAME("stats"),		CHILD(named, stats)}
};
static const ctl_named_node_t	super_root_node[] = {
	{NAME(""),		CHILD(named, root)}
};
/******************************************************************************/

static bool
ctl_arena_init(ctl_arena_stats_t *astats)
{

	if (astats->lstats == NULL) {
		astats->lstats = (malloc_large_stats_t *)base_alloc(nlclasses *
		    sizeof(malloc_large_stats_t));
		if (astats->lstats == NULL)
			return (true);
	}

	return (false);
}
static void
ctl_arena_clear(ctl_arena_stats_t *astats)
{

	astats->dss = dss_prec_names[dss_prec_limit];
	astats->pactive = 0;
	astats->pdirty = 0;
	if (config_stats) {
		memset(&astats->astats, 0, sizeof(arena_stats_t));
		astats->allocated_small = 0;
		astats->nmalloc_small = 0;
		astats->ndalloc_small = 0;
		astats->nrequests_small = 0;
		memset(astats->bstats, 0, NBINS * sizeof(malloc_bin_stats_t));
		memset(astats->lstats, 0, nlclasses *
		    sizeof(malloc_large_stats_t));
	}
}
static void
ctl_arena_stats_amerge(ctl_arena_stats_t *cstats, arena_t *arena)
{
	unsigned i;

	arena_stats_merge(arena, &cstats->dss, &cstats->pactive,
	    &cstats->pdirty, &cstats->astats, cstats->bstats, cstats->lstats);

	for (i = 0; i < NBINS; i++) {
		cstats->allocated_small += cstats->bstats[i].allocated;
		cstats->nmalloc_small += cstats->bstats[i].nmalloc;
		cstats->ndalloc_small += cstats->bstats[i].ndalloc;
		cstats->nrequests_small += cstats->bstats[i].nrequests;
	}
}
static void
ctl_arena_stats_smerge(ctl_arena_stats_t *sstats, ctl_arena_stats_t *astats)
{
	unsigned i;

	sstats->pactive += astats->pactive;
	sstats->pdirty += astats->pdirty;

	sstats->astats.mapped += astats->astats.mapped;
	sstats->astats.npurge += astats->astats.npurge;
	sstats->astats.nmadvise += astats->astats.nmadvise;
	sstats->astats.purged += astats->astats.purged;

	sstats->allocated_small += astats->allocated_small;
	sstats->nmalloc_small += astats->nmalloc_small;
	sstats->ndalloc_small += astats->ndalloc_small;
	sstats->nrequests_small += astats->nrequests_small;

	sstats->astats.allocated_large += astats->astats.allocated_large;
	sstats->astats.nmalloc_large += astats->astats.nmalloc_large;
	sstats->astats.ndalloc_large += astats->astats.ndalloc_large;
	sstats->astats.nrequests_large += astats->astats.nrequests_large;

	for (i = 0; i < nlclasses; i++) {
		sstats->lstats[i].nmalloc += astats->lstats[i].nmalloc;
		sstats->lstats[i].ndalloc += astats->lstats[i].ndalloc;
		sstats->lstats[i].nrequests += astats->lstats[i].nrequests;
		sstats->lstats[i].curruns += astats->lstats[i].curruns;
	}

	for (i = 0; i < NBINS; i++) {
		sstats->bstats[i].allocated += astats->bstats[i].allocated;
		sstats->bstats[i].nmalloc += astats->bstats[i].nmalloc;
		sstats->bstats[i].ndalloc += astats->bstats[i].ndalloc;
		sstats->bstats[i].nrequests += astats->bstats[i].nrequests;
		if (config_tcache) {
			sstats->bstats[i].nfills += astats->bstats[i].nfills;
			sstats->bstats[i].nflushes +=
			    astats->bstats[i].nflushes;
		}
		sstats->bstats[i].nruns += astats->bstats[i].nruns;
		sstats->bstats[i].reruns += astats->bstats[i].reruns;
		sstats->bstats[i].curruns += astats->bstats[i].curruns;
	}
}
static void
ctl_arena_refresh(arena_t *arena, unsigned i)
{
	ctl_arena_stats_t *astats = &ctl_stats.arenas[i];
	ctl_arena_stats_t *sstats = &ctl_stats.arenas[ctl_stats.narenas];

	ctl_arena_clear(astats);

	sstats->nthreads += astats->nthreads;
	if (config_stats) {
		ctl_arena_stats_amerge(astats, arena);
		/* Merge into sum stats as well. */
		ctl_arena_stats_smerge(sstats, astats);
	} else {
		astats->pactive += arena->nactive;
		astats->pdirty += arena->ndirty;
		/* Merge into sum stats as well. */
		sstats->pactive += arena->nactive;
		sstats->pdirty += arena->ndirty;
	}
}
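/*
 * Editorial note (not in the original source): ctl_stats.arenas holds
 * ctl_stats.narenas + 1 elements; the extra slot at index ctl_stats.narenas
 * is the destination of the smerge calls above and therefore carries totals
 * summed across all arenas.
 */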
static bool
ctl_grow(void)
{
	size_t astats_size;
	ctl_arena_stats_t *astats;
	arena_t **tarenas;

	/* Extend arena stats and arenas arrays. */
	astats_size = (ctl_stats.narenas + 2) * sizeof(ctl_arena_stats_t);
	if (ctl_stats.narenas == narenas_auto) {
		/* ctl_stats.arenas and arenas came from base_alloc(). */
		astats = (ctl_arena_stats_t *)imalloc(astats_size);
		if (astats == NULL)
			return (true);
		memcpy(astats, ctl_stats.arenas, (ctl_stats.narenas + 1) *
		    sizeof(ctl_arena_stats_t));

		tarenas = (arena_t **)imalloc((ctl_stats.narenas + 1) *
		    sizeof(arena_t *));
		if (tarenas == NULL) {
			idalloc(astats);
			return (true);
		}
		memcpy(tarenas, arenas, ctl_stats.narenas * sizeof(arena_t *));
	} else {
		astats = (ctl_arena_stats_t *)iralloc(ctl_stats.arenas,
		    astats_size, 0, 0, false, false);
		if (astats == NULL)
			return (true);

		tarenas = (arena_t **)iralloc(arenas, (ctl_stats.narenas + 1) *
		    sizeof(arena_t *), 0, 0, false, false);
		if (tarenas == NULL)
			return (true);
	}
	/* Initialize the new astats and arenas elements. */
	memset(&astats[ctl_stats.narenas + 1], 0, sizeof(ctl_arena_stats_t));
	if (ctl_arena_init(&astats[ctl_stats.narenas + 1]))
		return (true);
	tarenas[ctl_stats.narenas] = NULL;
	/* Swap merged stats to their new location. */
	{
		ctl_arena_stats_t tstats;
		memcpy(&tstats, &astats[ctl_stats.narenas],
		    sizeof(ctl_arena_stats_t));
		memcpy(&astats[ctl_stats.narenas],
		    &astats[ctl_stats.narenas + 1], sizeof(ctl_arena_stats_t));
		memcpy(&astats[ctl_stats.narenas + 1], &tstats,
		    sizeof(ctl_arena_stats_t));
	}
	ctl_stats.arenas = astats;
	ctl_stats.narenas++;
	malloc_mutex_lock(&arenas_lock);
	arenas = tarenas;
	narenas_total++;
	arenas_extend(narenas_total - 1);
	malloc_mutex_unlock(&arenas_lock);

	return (false);
}
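/*
 * Editorial note (not in the original source): growth allocates narenas + 2
 * stats slots so that, after the swap above, the per-arena slots stay at
 * indices 0..narenas and the merged-totals slot moves to the new end; only
 * then are ctl_stats.narenas and narenas_total bumped.
 */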
static void
ctl_refresh(void)
{
	unsigned i;
	VARIABLE_ARRAY(arena_t *, tarenas, ctl_stats.narenas);

	if (config_stats) {
		malloc_mutex_lock(&chunks_mtx);
		ctl_stats.chunks.current = stats_chunks.curchunks;
		ctl_stats.chunks.total = stats_chunks.nchunks;
		ctl_stats.chunks.high = stats_chunks.highchunks;
		malloc_mutex_unlock(&chunks_mtx);

		malloc_mutex_lock(&huge_mtx);
		ctl_stats.huge.allocated = huge_allocated;
		ctl_stats.huge.nmalloc = huge_nmalloc;
		ctl_stats.huge.ndalloc = huge_ndalloc;
		malloc_mutex_unlock(&huge_mtx);
	}

	/*
	 * Clear sum stats, since they will be merged into by
	 * ctl_arena_refresh().
	 */
	ctl_stats.arenas[ctl_stats.narenas].nthreads = 0;
	ctl_arena_clear(&ctl_stats.arenas[ctl_stats.narenas]);

	malloc_mutex_lock(&arenas_lock);
	memcpy(tarenas, arenas, sizeof(arena_t *) * ctl_stats.narenas);
	for (i = 0; i < ctl_stats.narenas; i++) {
		if (arenas[i] != NULL)
			ctl_stats.arenas[i].nthreads = arenas[i]->nthreads;
		else
			ctl_stats.arenas[i].nthreads = 0;
	}
	malloc_mutex_unlock(&arenas_lock);
	for (i = 0; i < ctl_stats.narenas; i++) {
		bool initialized = (tarenas[i] != NULL);

		ctl_stats.arenas[i].initialized = initialized;
		if (initialized)
			ctl_arena_refresh(tarenas[i], i);
	}

	if (config_stats) {
		ctl_stats.allocated =
		    ctl_stats.arenas[ctl_stats.narenas].allocated_small
		    + ctl_stats.arenas[ctl_stats.narenas].astats.allocated_large
		    + ctl_stats.huge.allocated;
		ctl_stats.active =
		    (ctl_stats.arenas[ctl_stats.narenas].pactive << LG_PAGE)
		    + ctl_stats.huge.allocated;
		ctl_stats.mapped = (ctl_stats.chunks.current << opt_lg_chunk);
	}

	ctl_epoch++;
}
static bool
ctl_init(void)
{
	bool ret;

	malloc_mutex_lock(&ctl_mtx);
	if (ctl_initialized == false) {
		/*
		 * Allocate space for one extra arena stats element, which
		 * contains summed stats across all arenas.
		 */
		assert(narenas_auto == narenas_total_get());
		ctl_stats.narenas = narenas_auto;
		ctl_stats.arenas = (ctl_arena_stats_t *)base_alloc(
		    (ctl_stats.narenas + 1) * sizeof(ctl_arena_stats_t));
		if (ctl_stats.arenas == NULL) {
			ret = true;
			goto label_return;
		}
		memset(ctl_stats.arenas, 0, (ctl_stats.narenas + 1) *
		    sizeof(ctl_arena_stats_t));

		/*
		 * Initialize all stats structures, regardless of whether they
		 * ever get used.  Lazy initialization would allow errors to
		 * cause inconsistent state to be viewable by the application.
		 */
		if (config_stats) {
			unsigned i;
			for (i = 0; i <= ctl_stats.narenas; i++) {
				if (ctl_arena_init(&ctl_stats.arenas[i])) {
					ret = true;
					goto label_return;
				}
			}
		}
		ctl_stats.arenas[ctl_stats.narenas].initialized = true;

		ctl_epoch = 0;
		ctl_refresh();
		ctl_initialized = true;
	}

	ret = false;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
static int
ctl_lookup(const char *name, ctl_node_t const **nodesp, size_t *mibp,
    size_t *depthp)
{
	int ret;
	const char *elm, *tdot, *dot;
	size_t elen, i, j;
	const ctl_named_node_t *node;

	elm = name;
	/* Equivalent to strchrnul(). */
	dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot : strchr(elm, '\0');
	elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	if (elen == 0) {
		ret = ENOENT;
		goto label_return;
	}
	node = super_root_node;
	for (i = 0; i < *depthp; i++) {
		assert(node);
		assert(node->nchildren > 0);
		if (ctl_named_node(node->children) != NULL) {
			const ctl_named_node_t *pnode = node;

			/* Children are named. */
			for (j = 0; j < node->nchildren; j++) {
				const ctl_named_node_t *child =
				    ctl_named_children(node, j);
				if (strlen(child->name) == elen &&
				    strncmp(elm, child->name, elen) == 0) {
					node = child;
					if (nodesp != NULL)
						nodesp[i] =
						    (const ctl_node_t *)node;
					mibp[i] = j;
					break;
				}
			}
			if (node == pnode) {
				ret = ENOENT;
				goto label_return;
			}
		} else {
			uintmax_t index;
			const ctl_indexed_node_t *inode;

			/* Children are indexed. */
			index = malloc_strtoumax(elm, NULL, 10);
			if (index == UINTMAX_MAX || index > SIZE_T_MAX) {
				ret = ENOENT;
				goto label_return;
			}

			inode = ctl_indexed_node(node->children);
			node = inode->index(mibp, *depthp, (size_t)index);
			if (node == NULL) {
				ret = ENOENT;
				goto label_return;
			}

			if (nodesp != NULL)
				nodesp[i] = (const ctl_node_t *)node;
			mibp[i] = (size_t)index;
		}

		if (node->ctl != NULL) {
			/* Terminal node. */
			if (*dot != '\0') {
				/*
				 * The name contains more elements than are
				 * in this path through the tree.
				 */
				ret = ENOENT;
				goto label_return;
			}
			/* Complete lookup successful. */
			*depthp = i + 1;
			break;
		}

		/* Update elm. */
		if (*dot == '\0') {
			/* No more elements. */
			ret = ENOENT;
			goto label_return;
		}
		elm = &dot[1];
		dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot :
		    strchr(elm, '\0');
		elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	}

	ret = 0;
label_return:
	return (ret);
}
int
ctl_byname(const char *name, void *oldp, size_t *oldlenp, void *newp,
    size_t newlen)
{
	int ret;
	size_t depth;
	ctl_node_t const *nodes[CTL_MAX_DEPTH];
	size_t mib[CTL_MAX_DEPTH];
	const ctl_named_node_t *node;

	if (ctl_initialized == false && ctl_init()) {
		ret = EAGAIN;
		goto label_return;
	}

	depth = CTL_MAX_DEPTH;
	ret = ctl_lookup(name, nodes, mib, &depth);
	if (ret != 0)
		goto label_return;

	node = ctl_named_node(nodes[depth-1]);
	if (node != NULL && node->ctl)
		ret = node->ctl(mib, depth, oldp, oldlenp, newp, newlen);
	else {
		/* The name refers to a partial path through the ctl tree. */
		ret = ENOENT;
	}

label_return:
	return (ret);
}
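/*
 * Usage sketch (editorial, not in the original source): the public mallctl()
 * wrapper forwards here, so e.g.
 *
 *	size_t allocated, sz = sizeof(allocated);
 *	mallctl("stats.allocated", &allocated, &sz, NULL, 0);
 *
 * ends up as ctl_byname("stats.allocated", &allocated, &sz, NULL, 0).
 */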
int
ctl_nametomib(const char *name, size_t *mibp, size_t *miblenp)
{
	int ret;

	if (ctl_initialized == false && ctl_init()) {
		ret = EAGAIN;
		goto label_return;
	}

	ret = ctl_lookup(name, NULL, mibp, miblenp);
label_return:
	return (ret);
}
int
ctl_bymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	unsigned i;
	const ctl_named_node_t *node;

	if (ctl_initialized == false && ctl_init()) {
		ret = EAGAIN;
		goto label_return;
	}

	/* Iterate down the tree. */
	node = super_root_node;
	for (i = 0; i < miblen; i++) {
		assert(node);
		assert(node->nchildren > 0);
		if (ctl_named_node(node->children) != NULL) {
			/* Children are named. */
			if (node->nchildren <= mib[i]) {
				ret = ENOENT;
				goto label_return;
			}
			node = ctl_named_children(node, mib[i]);
		} else {
			const ctl_indexed_node_t *inode;

			/* Indexed element. */
			inode = ctl_indexed_node(node->children);
			node = inode->index(mib, miblen, mib[i]);
			if (node == NULL) {
				ret = ENOENT;
				goto label_return;
			}
		}
	}

	/* Call the ctl function. */
	if (node && node->ctl)
		ret = node->ctl(mib, miblen, oldp, oldlenp, newp, newlen);
	else {
		/* Partial MIB; complete path required. */
		ret = ENOENT;
	}

label_return:
	return (ret);
}
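/*
 * Usage sketch (editorial, not in the original source): callers that repeat a
 * query typically translate the name once and then reuse the MIB, e.g.
 *
 *	size_t pdirty, sz = sizeof(pdirty);
 *	size_t mib[4], miblen = sizeof(mib) / sizeof(size_t);
 *	mallctlnametomib("stats.arenas.0.pdirty", mib, &miblen);
 *	mib[2] = 1;	(query arena 1 instead of arena 0)
 *	mallctlbymib(mib, miblen, &pdirty, &sz, NULL, 0);
 */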
bool
ctl_boot(void)
{

	if (malloc_mutex_init(&ctl_mtx))
		return (true);

	ctl_initialized = false;

	return (false);
}

void
ctl_prefork(void)
{

	malloc_mutex_lock(&ctl_mtx);
}

void
ctl_postfork_parent(void)
{

	malloc_mutex_postfork_parent(&ctl_mtx);
}

void
ctl_postfork_child(void)
{

	malloc_mutex_postfork_child(&ctl_mtx);
}
/******************************************************************************/
/* *_ctl() functions. */

#define	READONLY()	do {						\
	if (newp != NULL || newlen != 0) {				\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)

#define	WRITEONLY()	do {						\
	if (oldp != NULL || oldlenp != NULL) {				\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)

#define	READ(v, t)	do {						\
	if (oldp != NULL && oldlenp != NULL) {				\
		if (*oldlenp != sizeof(t)) {				\
			size_t	copylen = (sizeof(t) <= *oldlenp)	\
			    ? sizeof(t) : *oldlenp;			\
			memcpy(oldp, (void *)&v, copylen);		\
			ret = EINVAL;					\
			goto label_return;				\
		} else							\
			*(t *)oldp = v;					\
	}								\
} while (0)

#define	WRITE(v, t)	do {						\
	if (newp != NULL) {						\
		if (newlen != sizeof(t)) {				\
			ret = EINVAL;					\
			goto label_return;				\
		}							\
		v = *(t *)newp;						\
	}								\
} while (0)
/*
 * There's a lot of code duplication in the following macros due to limitations
 * in how nested cpp macros are expanded.
 */
#define	CTL_RO_CLGEN(c, l, n, v, t)					\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	if ((c) == false)						\
		return (ENOENT);					\
	if (l)								\
		malloc_mutex_lock(&ctl_mtx);				\
	READONLY();							\
	oldval = v;							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	if (l)								\
		malloc_mutex_unlock(&ctl_mtx);				\
	return (ret);							\
}

#define	CTL_RO_CGEN(c, n, v, t)						\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	if ((c) == false)						\
		return (ENOENT);					\
	malloc_mutex_lock(&ctl_mtx);					\
	READONLY();							\
	oldval = v;							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	malloc_mutex_unlock(&ctl_mtx);					\
	return (ret);							\
}

#define	CTL_RO_GEN(n, v, t)						\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	malloc_mutex_lock(&ctl_mtx);					\
	READONLY();							\
	oldval = v;							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	malloc_mutex_unlock(&ctl_mtx);					\
	return (ret);							\
}

/*
 * ctl_mtx is not acquired, under the assumption that no pertinent data will
 * mutate during the call.
 */
#define	CTL_RO_NL_CGEN(c, n, v, t)					\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	if ((c) == false)						\
		return (ENOENT);					\
	READONLY();							\
	oldval = v;							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}

#define	CTL_RO_NL_GEN(n, v, t)						\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	READONLY();							\
	oldval = v;							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}

#define	CTL_RO_BOOL_CONFIG_GEN(n)					\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	bool oldval;							\
									\
	READONLY();							\
	oldval = n;							\
	READ(oldval, bool);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
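/*
 * Illustrative expansion (editorial, not in the original source):
 * CTL_RO_BOOL_CONFIG_GEN(config_debug) defines config_debug_ctl(), a
 * read-only handler that rejects any new value via READONLY() and copies the
 * compile-time config_debug flag out through READ().
 */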
CTL_RO_NL_GEN(version, JEMALLOC_VERSION, const char *)

static int
epoch_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	uint64_t newval;

	malloc_mutex_lock(&ctl_mtx);
	WRITE(newval, uint64_t);
	if (newp != NULL)
		ctl_refresh();
	READ(ctl_epoch, uint64_t);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
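/*
 * Usage note (editorial, not in the original source): writing any uint64_t to
 * "epoch" triggers ctl_refresh(), so applications snapshot statistics by
 * bumping the epoch first and then reading the stats.* nodes, e.g.
 * mallctl("epoch", NULL, NULL, &epoch, sizeof(epoch)).
 */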
static int
thread_tcache_enabled_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (config_tcache == false)
		return (ENOENT);

	oldval = tcache_enabled_get();
	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		tcache_enabled_set(*(bool *)newp);
	}
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
static int
thread_tcache_flush_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	if (config_tcache == false)
		return (ENOENT);

	READONLY();
	WRITEONLY();

	tcache_flush();

	ret = 0;
label_return:
	return (ret);
}
static int
thread_arena_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	unsigned newind, oldind;

	malloc_mutex_lock(&ctl_mtx);
	newind = oldind = choose_arena(NULL)->ind;
	WRITE(newind, unsigned);
	READ(oldind, unsigned);
	if (newind != oldind) {
		arena_t *arena;

		if (newind >= ctl_stats.narenas) {
			/* New arena index is out of range. */
			ret = EFAULT;
			goto label_return;
		}

		/* Initialize arena if necessary. */
		malloc_mutex_lock(&arenas_lock);
		if ((arena = arenas[newind]) == NULL && (arena =
		    arenas_extend(newind)) == NULL) {
			malloc_mutex_unlock(&arenas_lock);
			ret = EAGAIN;
			goto label_return;
		}
		assert(arena == arenas[newind]);
		arenas[oldind]->nthreads--;
		arenas[newind]->nthreads++;
		malloc_mutex_unlock(&arenas_lock);

		/* Set new arena association. */
		if (config_tcache) {
			tcache_t *tcache;
			if ((uintptr_t)(tcache = *tcache_tsd_get()) >
			    (uintptr_t)TCACHE_STATE_MAX) {
				tcache_arena_dissociate(tcache);
				tcache_arena_associate(tcache, arena);
			}
		}
		arenas_tsd_set(&arena);
	}

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
CTL_RO_NL_CGEN(config_stats, thread_allocated,
    thread_allocated_tsd_get()->allocated, uint64_t)
CTL_RO_NL_CGEN(config_stats, thread_allocatedp,
    &thread_allocated_tsd_get()->allocated, uint64_t *)
CTL_RO_NL_CGEN(config_stats, thread_deallocated,
    thread_allocated_tsd_get()->deallocated, uint64_t)
CTL_RO_NL_CGEN(config_stats, thread_deallocatedp,
    &thread_allocated_tsd_get()->deallocated, uint64_t *)
/******************************************************************************/

CTL_RO_BOOL_CONFIG_GEN(config_debug)
CTL_RO_BOOL_CONFIG_GEN(config_dss)
CTL_RO_BOOL_CONFIG_GEN(config_fill)
CTL_RO_BOOL_CONFIG_GEN(config_lazy_lock)
CTL_RO_BOOL_CONFIG_GEN(config_mremap)
CTL_RO_BOOL_CONFIG_GEN(config_munmap)
CTL_RO_BOOL_CONFIG_GEN(config_prof)
CTL_RO_BOOL_CONFIG_GEN(config_prof_libgcc)
CTL_RO_BOOL_CONFIG_GEN(config_prof_libunwind)
CTL_RO_BOOL_CONFIG_GEN(config_stats)
CTL_RO_BOOL_CONFIG_GEN(config_tcache)
CTL_RO_BOOL_CONFIG_GEN(config_tls)
CTL_RO_BOOL_CONFIG_GEN(config_utrace)
CTL_RO_BOOL_CONFIG_GEN(config_valgrind)
CTL_RO_BOOL_CONFIG_GEN(config_xmalloc)
/******************************************************************************/

CTL_RO_NL_GEN(opt_abort, opt_abort, bool)
CTL_RO_NL_GEN(opt_dss, opt_dss, const char *)
CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
CTL_RO_NL_GEN(opt_narenas, opt_narenas, size_t)
CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool)
CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, bool)
CTL_RO_NL_CGEN(config_fill, opt_zero, opt_zero, bool)
CTL_RO_NL_CGEN(config_fill, opt_quarantine, opt_quarantine, size_t)
CTL_RO_NL_CGEN(config_fill, opt_redzone, opt_redzone, bool)
CTL_RO_NL_CGEN(config_utrace, opt_utrace, opt_utrace, bool)
CTL_RO_NL_CGEN(config_valgrind, opt_valgrind, opt_valgrind, bool)
CTL_RO_NL_CGEN(config_xmalloc, opt_xmalloc, opt_xmalloc, bool)
CTL_RO_NL_CGEN(config_tcache, opt_tcache, opt_tcache, bool)
CTL_RO_NL_CGEN(config_tcache, opt_lg_tcache_max, opt_lg_tcache_max, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof, opt_prof, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_prefix, opt_prof_prefix, const char *)
CTL_RO_CGEN(config_prof, opt_prof_active, opt_prof_active, bool) /* Mutable. */
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_sample, opt_lg_prof_sample, size_t)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_interval, opt_lg_prof_interval, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_gdump, opt_prof_gdump, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_final, opt_prof_final, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_leak, opt_prof_leak, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_accum, opt_prof_accum, bool)
/******************************************************************************/

/* ctl_mutex must be held during execution of this function. */
static void
arena_purge(unsigned arena_ind)
{
	VARIABLE_ARRAY(arena_t *, tarenas, ctl_stats.narenas);

	malloc_mutex_lock(&arenas_lock);
	memcpy(tarenas, arenas, sizeof(arena_t *) * ctl_stats.narenas);
	malloc_mutex_unlock(&arenas_lock);

	if (arena_ind == ctl_stats.narenas) {
		unsigned i;
		for (i = 0; i < ctl_stats.narenas; i++) {
			if (tarenas[i] != NULL)
				arena_purge_all(tarenas[i]);
		}
	} else {
		assert(arena_ind < ctl_stats.narenas);
		if (tarenas[arena_ind] != NULL)
			arena_purge_all(tarenas[arena_ind]);
	}
}
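/*
 * Editorial note (not in the original source): arena_ind == ctl_stats.narenas
 * is the "all arenas" case; arenas_purge_ctl() below maps a written UINT_MAX
 * to this value.
 */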
static int
arena_i_purge_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;

	READONLY();
	WRITEONLY();
	malloc_mutex_lock(&ctl_mtx);
	arena_purge(mib[1]);
	malloc_mutex_unlock(&ctl_mtx);

	ret = 0;
label_return:
	return (ret);
}
static int
arena_i_dss_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret, i;
	bool match, err;
	const char *dss;
	unsigned arena_ind = mib[1];
	dss_prec_t dss_prec_old = dss_prec_limit;
	dss_prec_t dss_prec = dss_prec_limit;

	malloc_mutex_lock(&ctl_mtx);
	WRITE(dss, const char *);
	match = false;
	for (i = 0; i < dss_prec_limit; i++) {
		if (strcmp(dss_prec_names[i], dss) == 0) {
			dss_prec = i;
			match = true;
			break;
		}
	}
	if (match == false) {
		ret = EINVAL;
		goto label_return;
	}

	if (arena_ind < ctl_stats.narenas) {
		arena_t *arena = arenas[arena_ind];
		if (arena != NULL) {
			dss_prec_old = arena_dss_prec_get(arena);
			arena_dss_prec_set(arena, dss_prec);
			err = false;
		} else
			err = true;
	} else {
		dss_prec_old = chunk_dss_prec_get();
		err = chunk_dss_prec_set(dss_prec);
	}
	dss = dss_prec_names[dss_prec_old];
	READ(dss, const char *);
	if (err) {
		ret = EFAULT;
		goto label_return;
	}

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
static const ctl_named_node_t *
arena_i_index(const size_t *mib, size_t miblen, size_t i)
{
	const ctl_named_node_t * ret;

	malloc_mutex_lock(&ctl_mtx);
	if (i > ctl_stats.narenas) {
		ret = NULL;
		goto label_return;
	}

	ret = super_arena_i_node;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
/******************************************************************************/

CTL_RO_NL_GEN(arenas_bin_i_size, arena_bin_info[mib[2]].reg_size, size_t)
CTL_RO_NL_GEN(arenas_bin_i_nregs, arena_bin_info[mib[2]].nregs, uint32_t)
CTL_RO_NL_GEN(arenas_bin_i_run_size, arena_bin_info[mib[2]].run_size, size_t)
static const ctl_named_node_t *
arenas_bin_i_index(const size_t *mib, size_t miblen, size_t i)
{

	if (i > NBINS)
		return (NULL);
	return (super_arenas_bin_i_node);
}

CTL_RO_NL_GEN(arenas_lrun_i_size, ((mib[2]+1) << LG_PAGE), size_t)
static const ctl_named_node_t *
arenas_lrun_i_index(const size_t *mib, size_t miblen, size_t i)
{

	if (i > nlclasses)
		return (NULL);
	return (super_arenas_lrun_i_node);
}
static int
arenas_narenas_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned narenas;

	malloc_mutex_lock(&ctl_mtx);
	READONLY();
	if (*oldlenp != sizeof(unsigned)) {
		ret = EINVAL;
		goto label_return;
	}
	narenas = ctl_stats.narenas;
	READ(narenas, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
static int
arenas_initialized_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned nread, i;

	malloc_mutex_lock(&ctl_mtx);
	READONLY();
	if (*oldlenp != ctl_stats.narenas * sizeof(bool)) {
		ret = EINVAL;
		nread = (*oldlenp < ctl_stats.narenas * sizeof(bool))
		    ? (*oldlenp / sizeof(bool)) : ctl_stats.narenas;
	} else {
		ret = 0;
		nread = ctl_stats.narenas;
	}

	for (i = 0; i < nread; i++)
		((bool *)oldp)[i] = ctl_stats.arenas[i].initialized;

label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
CTL_RO_NL_GEN(arenas_quantum, QUANTUM, size_t)
CTL_RO_NL_GEN(arenas_page, PAGE, size_t)
CTL_RO_NL_CGEN(config_tcache, arenas_tcache_max, tcache_maxclass, size_t)
CTL_RO_NL_GEN(arenas_nbins, NBINS, unsigned)
CTL_RO_NL_CGEN(config_tcache, arenas_nhbins, nhbins, unsigned)
CTL_RO_NL_GEN(arenas_nlruns, nlclasses, size_t)
static int
arenas_purge_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	unsigned arena_ind;

	malloc_mutex_lock(&ctl_mtx);
	WRITEONLY();
	arena_ind = UINT_MAX;
	WRITE(arena_ind, unsigned);
	if (newp != NULL && arena_ind >= ctl_stats.narenas)
		ret = EFAULT;
	else {
		if (arena_ind == UINT_MAX)
			arena_ind = ctl_stats.narenas;
		arena_purge(arena_ind);
		ret = 0;
	}

label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
static int
arenas_extend_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;

	malloc_mutex_lock(&ctl_mtx);
	READONLY();
	if (ctl_grow()) {
		ret = EAGAIN;
		goto label_return;
	}
	READ(ctl_stats.narenas - 1, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
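/*
 * Usage sketch (editorial, not in the original source):
 *
 *	unsigned arena_ind;
 *	size_t sz = sizeof(arena_ind);
 *	mallctl("arenas.extend", &arena_ind, &sz, NULL, 0);
 *
 * creates one additional arena via ctl_grow() and reads back its index.
 */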
/******************************************************************************/

static int
prof_active_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (config_prof == false)
		return (ENOENT);

	malloc_mutex_lock(&ctl_mtx); /* Protect opt_prof_active. */
	oldval = opt_prof_active;
	if (newp != NULL) {
		/*
		 * The memory barriers will tend to make opt_prof_active
		 * propagate faster on systems with weak memory ordering.
		 */
		mb_write();
		WRITE(opt_prof_active, bool);
		mb_write();
	}
	READ(oldval, bool);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
static int
prof_dump_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	const char *filename = NULL;

	if (config_prof == false)
		return (ENOENT);

	WRITEONLY();
	WRITE(filename, const char *);

	if (prof_mdump(filename)) {
		ret = EFAULT;
		goto label_return;
	}

	ret = 0;
label_return:
	return (ret);
}

CTL_RO_NL_CGEN(config_prof, prof_interval, prof_interval, uint64_t)
/******************************************************************************/

CTL_RO_CGEN(config_stats, stats_chunks_current, ctl_stats.chunks.current,
    size_t)
CTL_RO_CGEN(config_stats, stats_chunks_total, ctl_stats.chunks.total, uint64_t)
CTL_RO_CGEN(config_stats, stats_chunks_high, ctl_stats.chunks.high, size_t)
CTL_RO_CGEN(config_stats, stats_huge_allocated, huge_allocated, size_t)
CTL_RO_CGEN(config_stats, stats_huge_nmalloc, huge_nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_huge_ndalloc, huge_ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_allocated,
    ctl_stats.arenas[mib[2]].allocated_small, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nmalloc,
    ctl_stats.arenas[mib[2]].nmalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_ndalloc,
    ctl_stats.arenas[mib[2]].ndalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nrequests,
    ctl_stats.arenas[mib[2]].nrequests_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_allocated,
    ctl_stats.arenas[mib[2]].astats.allocated_large, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nmalloc,
    ctl_stats.arenas[mib[2]].astats.nmalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_ndalloc,
    ctl_stats.arenas[mib[2]].astats.ndalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nrequests,
    ctl_stats.arenas[mib[2]].astats.nrequests_large, uint64_t)

CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_allocated,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].allocated, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nmalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_ndalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nrequests,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nfills,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nfills, uint64_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nflushes,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nflushes, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nreruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].reruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].curruns, size_t)
static const ctl_named_node_t *
stats_arenas_i_bins_j_index(const size_t *mib, size_t miblen, size_t j)
{

	if (j > NBINS)
		return (NULL);
	return (super_stats_arenas_i_bins_j_node);
}
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nmalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_ndalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nrequests,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_curruns,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].curruns, size_t)

static const ctl_named_node_t *
stats_arenas_i_lruns_j_index(const size_t *mib, size_t miblen, size_t j)
{

	if (j > nlclasses)
		return (NULL);
	return (super_stats_arenas_i_lruns_j_node);
}
CTL_RO_GEN(stats_arenas_i_nthreads, ctl_stats.arenas[mib[2]].nthreads, unsigned)
CTL_RO_GEN(stats_arenas_i_dss, ctl_stats.arenas[mib[2]].dss, const char *)
CTL_RO_GEN(stats_arenas_i_pactive, ctl_stats.arenas[mib[2]].pactive, size_t)
CTL_RO_GEN(stats_arenas_i_pdirty, ctl_stats.arenas[mib[2]].pdirty, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_mapped,
    ctl_stats.arenas[mib[2]].astats.mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_npurge,
    ctl_stats.arenas[mib[2]].astats.npurge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_nmadvise,
    ctl_stats.arenas[mib[2]].astats.nmadvise, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_purged,
    ctl_stats.arenas[mib[2]].astats.purged, uint64_t)
static const ctl_named_node_t *
stats_arenas_i_index(const size_t *mib, size_t miblen, size_t i)
{
	const ctl_named_node_t * ret;

	malloc_mutex_lock(&ctl_mtx);
	if (i > ctl_stats.narenas || ctl_stats.arenas[i].initialized == false) {
		ret = NULL;
		goto label_return;
	}

	ret = super_stats_arenas_i_node;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
CTL_RO_CGEN(config_stats, stats_cactive, &stats_cactive, size_t *)
CTL_RO_CGEN(config_stats, stats_allocated, ctl_stats.allocated, size_t)
CTL_RO_CGEN(config_stats, stats_active, ctl_stats.active, size_t)
CTL_RO_CGEN(config_stats, stats_mapped, ctl_stats.mapped, size_t)