#define JEMALLOC_CTL_C_
#include "jemalloc/internal/jemalloc_internal.h"

/******************************************************************************/
/* Data. */

/*
 * ctl_mtx protects the following:
 * - ctl_stats
 * - opt_prof_active
 * - swap_enabled
 * - swap_prezeroed
 */
static malloc_mutex_t	ctl_mtx;
static bool		ctl_initialized;
static uint64_t		ctl_epoch;
static ctl_stats_t	ctl_stats;

/******************************************************************************/
/* Function prototypes for non-inline static functions. */
#define CTL_PROTO(n) \
static int	n##_ctl(const size_t *mib, size_t miblen, void *oldp, \
    size_t *oldlenp, void *newp, size_t newlen);

#define INDEX_PROTO(n) \
const ctl_node_t	*n##_index(const size_t *mib, size_t miblen, \
    size_t i);
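
/*
 * Illustrative expansion (not in the original source): CTL_PROTO(epoch)
 * declares
 *
 *	static int epoch_ctl(const size_t *mib, size_t miblen, void *oldp,
 *	    size_t *oldlenp, void *newp, size_t newlen);
 *
 * so every mallctl leaf handler shares one read/write signature, and
 * INDEX_PROTO(arenas_bin_i) declares the matching arenas_bin_i_index()
 * callback used to resolve numeric path components.
 */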

#ifdef JEMALLOC_STATS
static bool	ctl_arena_init(ctl_arena_stats_t *astats);
#endif
static void	ctl_arena_clear(ctl_arena_stats_t *astats);
#ifdef JEMALLOC_STATS
static void	ctl_arena_stats_amerge(ctl_arena_stats_t *cstats,
    arena_t *arena);
static void	ctl_arena_stats_smerge(ctl_arena_stats_t *sstats,
    ctl_arena_stats_t *astats);
#endif
static void	ctl_arena_refresh(arena_t *arena, unsigned i);
static void	ctl_refresh(void);
static bool	ctl_init(void);
static int	ctl_lookup(const char *name, ctl_node_t const **nodesp,
    size_t *mibp, size_t *depthp);

CTL_PROTO(version)
CTL_PROTO(epoch)
#ifdef JEMALLOC_TCACHE
CTL_PROTO(tcache_flush)
#endif
CTL_PROTO(thread_arena)
#ifdef JEMALLOC_STATS
CTL_PROTO(thread_allocated)
CTL_PROTO(thread_allocatedp)
CTL_PROTO(thread_deallocated)
CTL_PROTO(thread_deallocatedp)
#endif
CTL_PROTO(config_debug)
CTL_PROTO(config_dss)
CTL_PROTO(config_dynamic_page_shift)
CTL_PROTO(config_fill)
CTL_PROTO(config_lazy_lock)
CTL_PROTO(config_prof)
CTL_PROTO(config_prof_libgcc)
CTL_PROTO(config_prof_libunwind)
CTL_PROTO(config_stats)
CTL_PROTO(config_swap)
CTL_PROTO(config_sysv)
CTL_PROTO(config_tcache)
CTL_PROTO(config_tiny)
CTL_PROTO(config_tls)
CTL_PROTO(config_xmalloc)
CTL_PROTO(opt_abort)
CTL_PROTO(opt_lg_qspace_max)
CTL_PROTO(opt_lg_cspace_max)
CTL_PROTO(opt_lg_chunk)
CTL_PROTO(opt_narenas)
CTL_PROTO(opt_lg_dirty_mult)
CTL_PROTO(opt_stats_print)
#ifdef JEMALLOC_FILL
CTL_PROTO(opt_junk)
CTL_PROTO(opt_zero)
#endif
#ifdef JEMALLOC_SYSV
CTL_PROTO(opt_sysv)
#endif
#ifdef JEMALLOC_XMALLOC
CTL_PROTO(opt_xmalloc)
#endif
#ifdef JEMALLOC_TCACHE
CTL_PROTO(opt_tcache)
CTL_PROTO(opt_lg_tcache_gc_sweep)
#endif
#ifdef JEMALLOC_PROF
CTL_PROTO(opt_prof)
CTL_PROTO(opt_prof_prefix)
CTL_PROTO(opt_prof_active)
CTL_PROTO(opt_lg_prof_bt_max)
CTL_PROTO(opt_lg_prof_sample)
CTL_PROTO(opt_lg_prof_interval)
CTL_PROTO(opt_prof_gdump)
CTL_PROTO(opt_prof_leak)
CTL_PROTO(opt_prof_accum)
CTL_PROTO(opt_lg_prof_tcmax)
#endif
#ifdef JEMALLOC_SWAP
CTL_PROTO(opt_overcommit)
#endif
CTL_PROTO(arenas_bin_i_size)
CTL_PROTO(arenas_bin_i_nregs)
CTL_PROTO(arenas_bin_i_run_size)
INDEX_PROTO(arenas_bin_i)
CTL_PROTO(arenas_lrun_i_size)
INDEX_PROTO(arenas_lrun_i)
CTL_PROTO(arenas_narenas)
CTL_PROTO(arenas_initialized)
CTL_PROTO(arenas_quantum)
CTL_PROTO(arenas_cacheline)
CTL_PROTO(arenas_subpage)
CTL_PROTO(arenas_pagesize)
CTL_PROTO(arenas_chunksize)
#ifdef JEMALLOC_TINY
CTL_PROTO(arenas_tspace_min)
CTL_PROTO(arenas_tspace_max)
#endif
CTL_PROTO(arenas_qspace_min)
CTL_PROTO(arenas_qspace_max)
CTL_PROTO(arenas_cspace_min)
CTL_PROTO(arenas_cspace_max)
CTL_PROTO(arenas_sspace_min)
CTL_PROTO(arenas_sspace_max)
#ifdef JEMALLOC_TCACHE
CTL_PROTO(arenas_tcache_max)
#endif
CTL_PROTO(arenas_ntbins)
CTL_PROTO(arenas_nqbins)
CTL_PROTO(arenas_ncbins)
CTL_PROTO(arenas_nsbins)
CTL_PROTO(arenas_nbins)
#ifdef JEMALLOC_TCACHE
CTL_PROTO(arenas_nhbins)
#endif
CTL_PROTO(arenas_nlruns)
CTL_PROTO(arenas_purge)
#ifdef JEMALLOC_PROF
CTL_PROTO(prof_active)
CTL_PROTO(prof_dump)
CTL_PROTO(prof_interval)
#endif
#ifdef JEMALLOC_STATS
CTL_PROTO(stats_chunks_current)
CTL_PROTO(stats_chunks_total)
CTL_PROTO(stats_chunks_high)
CTL_PROTO(stats_huge_allocated)
CTL_PROTO(stats_huge_nmalloc)
CTL_PROTO(stats_huge_ndalloc)
CTL_PROTO(stats_arenas_i_small_allocated)
CTL_PROTO(stats_arenas_i_small_nmalloc)
CTL_PROTO(stats_arenas_i_small_ndalloc)
CTL_PROTO(stats_arenas_i_small_nrequests)
CTL_PROTO(stats_arenas_i_large_allocated)
CTL_PROTO(stats_arenas_i_large_nmalloc)
CTL_PROTO(stats_arenas_i_large_ndalloc)
CTL_PROTO(stats_arenas_i_large_nrequests)
CTL_PROTO(stats_arenas_i_bins_j_allocated)
CTL_PROTO(stats_arenas_i_bins_j_nmalloc)
CTL_PROTO(stats_arenas_i_bins_j_ndalloc)
CTL_PROTO(stats_arenas_i_bins_j_nrequests)
#ifdef JEMALLOC_TCACHE
CTL_PROTO(stats_arenas_i_bins_j_nfills)
CTL_PROTO(stats_arenas_i_bins_j_nflushes)
#endif
CTL_PROTO(stats_arenas_i_bins_j_nruns)
CTL_PROTO(stats_arenas_i_bins_j_nreruns)
CTL_PROTO(stats_arenas_i_bins_j_highruns)
CTL_PROTO(stats_arenas_i_bins_j_curruns)
INDEX_PROTO(stats_arenas_i_bins_j)
CTL_PROTO(stats_arenas_i_lruns_j_nmalloc)
CTL_PROTO(stats_arenas_i_lruns_j_ndalloc)
CTL_PROTO(stats_arenas_i_lruns_j_nrequests)
CTL_PROTO(stats_arenas_i_lruns_j_highruns)
CTL_PROTO(stats_arenas_i_lruns_j_curruns)
INDEX_PROTO(stats_arenas_i_lruns_j)
#endif
CTL_PROTO(stats_arenas_i_nthreads)
CTL_PROTO(stats_arenas_i_pactive)
CTL_PROTO(stats_arenas_i_pdirty)
#ifdef JEMALLOC_STATS
CTL_PROTO(stats_arenas_i_mapped)
CTL_PROTO(stats_arenas_i_npurge)
CTL_PROTO(stats_arenas_i_nmadvise)
CTL_PROTO(stats_arenas_i_purged)
#endif
INDEX_PROTO(stats_arenas_i)
#ifdef JEMALLOC_STATS
CTL_PROTO(stats_cactive)
CTL_PROTO(stats_allocated)
CTL_PROTO(stats_active)
CTL_PROTO(stats_mapped)
#endif
#ifdef JEMALLOC_SWAP
# ifdef JEMALLOC_STATS
CTL_PROTO(swap_avail)
# endif
CTL_PROTO(swap_prezeroed)
CTL_PROTO(swap_nfds)
CTL_PROTO(swap_fds)
#endif

/******************************************************************************/
/* mallctl tree. */

/* Maximum tree depth. */
#define CTL_MAX_DEPTH	6

#define NAME(n)	true,	{.named = {n
#define CHILD(c)	sizeof(c##_node) / sizeof(ctl_node_t),	c##_node}},	NULL
#define CTL(c)	0,	NULL}},	c##_ctl

/*
 * Only handles internal indexed nodes, since there are currently no external
 * ones.
 */
#define INDEX(i)	false,	{.indexed = {i##_index}},	NULL
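
/*
 * Illustrative note (not in the original source): these macros assemble
 * ctl_node_t initializers.  A leaf such as
 *
 *	{NAME("flush"), CTL(tcache_flush)}
 *
 * yields a named node with zero children and tcache_flush_ctl() as its
 * handler, while {NAME("bin"), CHILD(arenas_bin)} points at the
 * arenas_bin_node child array and leaves the handler NULL; {INDEX(...)}
 * marks a node whose children are selected by number via an *_index()
 * callback.
 */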

#ifdef JEMALLOC_TCACHE
static const ctl_node_t	tcache_node[] = {
	{NAME("flush"),	CTL(tcache_flush)}
};
#endif

static const ctl_node_t	thread_node[] = {
	{NAME("arena"),	CTL(thread_arena)}
#ifdef JEMALLOC_STATS
	,
	{NAME("allocated"),	CTL(thread_allocated)},
	{NAME("allocatedp"),	CTL(thread_allocatedp)},
	{NAME("deallocated"),	CTL(thread_deallocated)},
	{NAME("deallocatedp"),	CTL(thread_deallocatedp)}
#endif
};

static const ctl_node_t	config_node[] = {
	{NAME("debug"),	CTL(config_debug)},
	{NAME("dss"),	CTL(config_dss)},
	{NAME("dynamic_page_shift"),	CTL(config_dynamic_page_shift)},
	{NAME("fill"),	CTL(config_fill)},
	{NAME("lazy_lock"),	CTL(config_lazy_lock)},
	{NAME("prof"),	CTL(config_prof)},
	{NAME("prof_libgcc"),	CTL(config_prof_libgcc)},
	{NAME("prof_libunwind"),	CTL(config_prof_libunwind)},
	{NAME("stats"),	CTL(config_stats)},
	{NAME("swap"),	CTL(config_swap)},
	{NAME("sysv"),	CTL(config_sysv)},
	{NAME("tcache"),	CTL(config_tcache)},
	{NAME("tiny"),	CTL(config_tiny)},
	{NAME("tls"),	CTL(config_tls)},
	{NAME("xmalloc"),	CTL(config_xmalloc)}
};

static const ctl_node_t	opt_node[] = {
	{NAME("abort"),	CTL(opt_abort)},
	{NAME("lg_qspace_max"),	CTL(opt_lg_qspace_max)},
	{NAME("lg_cspace_max"),	CTL(opt_lg_cspace_max)},
	{NAME("lg_chunk"),	CTL(opt_lg_chunk)},
	{NAME("narenas"),	CTL(opt_narenas)},
	{NAME("lg_dirty_mult"),	CTL(opt_lg_dirty_mult)},
	{NAME("stats_print"),	CTL(opt_stats_print)}
#ifdef JEMALLOC_FILL
	,
	{NAME("junk"),	CTL(opt_junk)},
	{NAME("zero"),	CTL(opt_zero)}
#endif
#ifdef JEMALLOC_SYSV
	,
	{NAME("sysv"),	CTL(opt_sysv)}
#endif
#ifdef JEMALLOC_XMALLOC
	,
	{NAME("xmalloc"),	CTL(opt_xmalloc)}
#endif
#ifdef JEMALLOC_TCACHE
	,
	{NAME("tcache"),	CTL(opt_tcache)},
	{NAME("lg_tcache_gc_sweep"),	CTL(opt_lg_tcache_gc_sweep)}
#endif
#ifdef JEMALLOC_PROF
	,
	{NAME("prof"),	CTL(opt_prof)},
	{NAME("prof_prefix"),	CTL(opt_prof_prefix)},
	{NAME("prof_active"),	CTL(opt_prof_active)},
	{NAME("lg_prof_bt_max"),	CTL(opt_lg_prof_bt_max)},
	{NAME("lg_prof_sample"),	CTL(opt_lg_prof_sample)},
	{NAME("lg_prof_interval"),	CTL(opt_lg_prof_interval)},
	{NAME("prof_gdump"),	CTL(opt_prof_gdump)},
	{NAME("prof_leak"),	CTL(opt_prof_leak)},
	{NAME("prof_accum"),	CTL(opt_prof_accum)},
	{NAME("lg_prof_tcmax"),	CTL(opt_lg_prof_tcmax)}
#endif
#ifdef JEMALLOC_SWAP
	,
	{NAME("overcommit"),	CTL(opt_overcommit)}
#endif
};

static const ctl_node_t	arenas_bin_i_node[] = {
	{NAME("size"),	CTL(arenas_bin_i_size)},
	{NAME("nregs"),	CTL(arenas_bin_i_nregs)},
	{NAME("run_size"),	CTL(arenas_bin_i_run_size)}
};
static const ctl_node_t	super_arenas_bin_i_node[] = {
	{NAME(""),	CHILD(arenas_bin_i)}
};

static const ctl_node_t	arenas_bin_node[] = {
	{INDEX(arenas_bin_i)}
};

static const ctl_node_t	arenas_lrun_i_node[] = {
	{NAME("size"),	CTL(arenas_lrun_i_size)}
};
static const ctl_node_t	super_arenas_lrun_i_node[] = {
	{NAME(""),	CHILD(arenas_lrun_i)}
};

static const ctl_node_t	arenas_lrun_node[] = {
	{INDEX(arenas_lrun_i)}
};

static const ctl_node_t	arenas_node[] = {
	{NAME("narenas"),	CTL(arenas_narenas)},
	{NAME("initialized"),	CTL(arenas_initialized)},
	{NAME("quantum"),	CTL(arenas_quantum)},
	{NAME("cacheline"),	CTL(arenas_cacheline)},
	{NAME("subpage"),	CTL(arenas_subpage)},
	{NAME("pagesize"),	CTL(arenas_pagesize)},
	{NAME("chunksize"),	CTL(arenas_chunksize)},
#ifdef JEMALLOC_TINY
	{NAME("tspace_min"),	CTL(arenas_tspace_min)},
	{NAME("tspace_max"),	CTL(arenas_tspace_max)},
#endif
	{NAME("qspace_min"),	CTL(arenas_qspace_min)},
	{NAME("qspace_max"),	CTL(arenas_qspace_max)},
	{NAME("cspace_min"),	CTL(arenas_cspace_min)},
	{NAME("cspace_max"),	CTL(arenas_cspace_max)},
	{NAME("sspace_min"),	CTL(arenas_sspace_min)},
	{NAME("sspace_max"),	CTL(arenas_sspace_max)},
#ifdef JEMALLOC_TCACHE
	{NAME("tcache_max"),	CTL(arenas_tcache_max)},
#endif
	{NAME("ntbins"),	CTL(arenas_ntbins)},
	{NAME("nqbins"),	CTL(arenas_nqbins)},
	{NAME("ncbins"),	CTL(arenas_ncbins)},
	{NAME("nsbins"),	CTL(arenas_nsbins)},
	{NAME("nbins"),	CTL(arenas_nbins)},
#ifdef JEMALLOC_TCACHE
	{NAME("nhbins"),	CTL(arenas_nhbins)},
#endif
	{NAME("bin"),	CHILD(arenas_bin)},
	{NAME("nlruns"),	CTL(arenas_nlruns)},
	{NAME("lrun"),	CHILD(arenas_lrun)},
	{NAME("purge"),	CTL(arenas_purge)}
};

#ifdef JEMALLOC_PROF
static const ctl_node_t	prof_node[] = {
	{NAME("active"),	CTL(prof_active)},
	{NAME("dump"),	CTL(prof_dump)},
	{NAME("interval"),	CTL(prof_interval)}
};
#endif

#ifdef JEMALLOC_STATS
static const ctl_node_t	stats_chunks_node[] = {
	{NAME("current"),	CTL(stats_chunks_current)},
	{NAME("total"),	CTL(stats_chunks_total)},
	{NAME("high"),	CTL(stats_chunks_high)}
};

static const ctl_node_t	stats_huge_node[] = {
	{NAME("allocated"),	CTL(stats_huge_allocated)},
	{NAME("nmalloc"),	CTL(stats_huge_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_huge_ndalloc)}
};

static const ctl_node_t	stats_arenas_i_small_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_small_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_small_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_small_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_small_nrequests)}
};

static const ctl_node_t	stats_arenas_i_large_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_large_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_large_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_large_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_large_nrequests)}
};

static const ctl_node_t	stats_arenas_i_bins_j_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_bins_j_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_bins_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_bins_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_bins_j_nrequests)},
#ifdef JEMALLOC_TCACHE
	{NAME("nfills"),	CTL(stats_arenas_i_bins_j_nfills)},
	{NAME("nflushes"),	CTL(stats_arenas_i_bins_j_nflushes)},
#endif
	{NAME("nruns"),	CTL(stats_arenas_i_bins_j_nruns)},
	{NAME("nreruns"),	CTL(stats_arenas_i_bins_j_nreruns)},
	{NAME("highruns"),	CTL(stats_arenas_i_bins_j_highruns)},
	{NAME("curruns"),	CTL(stats_arenas_i_bins_j_curruns)}
};
static const ctl_node_t	super_stats_arenas_i_bins_j_node[] = {
	{NAME(""),	CHILD(stats_arenas_i_bins_j)}
};

static const ctl_node_t	stats_arenas_i_bins_node[] = {
	{INDEX(stats_arenas_i_bins_j)}
};

static const ctl_node_t	stats_arenas_i_lruns_j_node[] = {
	{NAME("nmalloc"),	CTL(stats_arenas_i_lruns_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_lruns_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_lruns_j_nrequests)},
	{NAME("highruns"),	CTL(stats_arenas_i_lruns_j_highruns)},
	{NAME("curruns"),	CTL(stats_arenas_i_lruns_j_curruns)}
};
static const ctl_node_t	super_stats_arenas_i_lruns_j_node[] = {
	{NAME(""),	CHILD(stats_arenas_i_lruns_j)}
};

static const ctl_node_t	stats_arenas_i_lruns_node[] = {
	{INDEX(stats_arenas_i_lruns_j)}
};
#endif

static const ctl_node_t	stats_arenas_i_node[] = {
	{NAME("nthreads"),	CTL(stats_arenas_i_nthreads)},
	{NAME("pactive"),	CTL(stats_arenas_i_pactive)},
	{NAME("pdirty"),	CTL(stats_arenas_i_pdirty)}
#ifdef JEMALLOC_STATS
	,
	{NAME("mapped"),	CTL(stats_arenas_i_mapped)},
	{NAME("npurge"),	CTL(stats_arenas_i_npurge)},
	{NAME("nmadvise"),	CTL(stats_arenas_i_nmadvise)},
	{NAME("purged"),	CTL(stats_arenas_i_purged)},
	{NAME("small"),	CHILD(stats_arenas_i_small)},
	{NAME("large"),	CHILD(stats_arenas_i_large)},
	{NAME("bins"),	CHILD(stats_arenas_i_bins)},
	{NAME("lruns"),	CHILD(stats_arenas_i_lruns)}
#endif
};
static const ctl_node_t	super_stats_arenas_i_node[] = {
	{NAME(""),	CHILD(stats_arenas_i)}
};

static const ctl_node_t	stats_arenas_node[] = {
	{INDEX(stats_arenas_i)}
};

static const ctl_node_t	stats_node[] = {
#ifdef JEMALLOC_STATS
	{NAME("cactive"),	CTL(stats_cactive)},
	{NAME("allocated"),	CTL(stats_allocated)},
	{NAME("active"),	CTL(stats_active)},
	{NAME("mapped"),	CTL(stats_mapped)},
	{NAME("chunks"),	CHILD(stats_chunks)},
	{NAME("huge"),	CHILD(stats_huge)},
#endif
	{NAME("arenas"),	CHILD(stats_arenas)}
};

#ifdef JEMALLOC_SWAP
static const ctl_node_t	swap_node[] = {
# ifdef JEMALLOC_STATS
	{NAME("avail"),	CTL(swap_avail)},
# endif
	{NAME("prezeroed"),	CTL(swap_prezeroed)},
	{NAME("nfds"),	CTL(swap_nfds)},
	{NAME("fds"),	CTL(swap_fds)}
};
#endif

static const ctl_node_t	root_node[] = {
	{NAME("version"),	CTL(version)},
	{NAME("epoch"),	CTL(epoch)},
#ifdef JEMALLOC_TCACHE
	{NAME("tcache"),	CHILD(tcache)},
#endif
	{NAME("thread"),	CHILD(thread)},
	{NAME("config"),	CHILD(config)},
	{NAME("opt"),	CHILD(opt)},
	{NAME("arenas"),	CHILD(arenas)},
#ifdef JEMALLOC_PROF
	{NAME("prof"),	CHILD(prof)},
#endif
	{NAME("stats"),	CHILD(stats)}
#ifdef JEMALLOC_SWAP
	,
	{NAME("swap"),	CHILD(swap)}
#endif
};
static const ctl_node_t	super_root_node[] = {
	{NAME(""),	CHILD(root)}
};

#undef NAME
#undef CHILD
#undef CTL
#undef INDEX
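
/*
 * Illustrative walk (not in the original source): resolving the name
 * "stats.arenas.0.pdirty" starts at super_root_node, descends through
 * root_node ("stats") and stats_node ("arenas"), consumes "0" via
 * stats_arenas_i_index(), and stops at the stats_arenas_i_pdirty leaf,
 * whose handler is then invoked.  CTL_MAX_DEPTH bounds how many components
 * such a name may have.
 */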

/******************************************************************************/
/* ctl_* functions. */

#ifdef JEMALLOC_STATS
static bool
ctl_arena_init(ctl_arena_stats_t *astats)
{

	if (astats->bstats == NULL) {
		astats->bstats = (malloc_bin_stats_t *)base_alloc(nbins *
		    sizeof(malloc_bin_stats_t));
		if (astats->bstats == NULL)
			return (true);
	}
	if (astats->lstats == NULL) {
		astats->lstats = (malloc_large_stats_t *)base_alloc(nlclasses *
		    sizeof(malloc_large_stats_t));
		if (astats->lstats == NULL)
			return (true);
	}

	return (false);
}
#endif

static void
ctl_arena_clear(ctl_arena_stats_t *astats)
{

	astats->pactive = 0;
	astats->pdirty = 0;
#ifdef JEMALLOC_STATS
	memset(&astats->astats, 0, sizeof(arena_stats_t));
	astats->allocated_small = 0;
	astats->nmalloc_small = 0;
	astats->ndalloc_small = 0;
	astats->nrequests_small = 0;
	memset(astats->bstats, 0, nbins * sizeof(malloc_bin_stats_t));
	memset(astats->lstats, 0, nlclasses * sizeof(malloc_large_stats_t));
#endif
}

#ifdef JEMALLOC_STATS
static void
ctl_arena_stats_amerge(ctl_arena_stats_t *cstats, arena_t *arena)
{
	unsigned i;

	arena_stats_merge(arena, &cstats->pactive, &cstats->pdirty,
	    &cstats->astats, cstats->bstats, cstats->lstats);

	for (i = 0; i < nbins; i++) {
		cstats->allocated_small += cstats->bstats[i].allocated;
		cstats->nmalloc_small += cstats->bstats[i].nmalloc;
		cstats->ndalloc_small += cstats->bstats[i].ndalloc;
		cstats->nrequests_small += cstats->bstats[i].nrequests;
	}
}

static void
ctl_arena_stats_smerge(ctl_arena_stats_t *sstats, ctl_arena_stats_t *astats)
{
	unsigned i;

	sstats->pactive += astats->pactive;
	sstats->pdirty += astats->pdirty;

	sstats->astats.mapped += astats->astats.mapped;
	sstats->astats.npurge += astats->astats.npurge;
	sstats->astats.nmadvise += astats->astats.nmadvise;
	sstats->astats.purged += astats->astats.purged;

	sstats->allocated_small += astats->allocated_small;
	sstats->nmalloc_small += astats->nmalloc_small;
	sstats->ndalloc_small += astats->ndalloc_small;
	sstats->nrequests_small += astats->nrequests_small;

	sstats->astats.allocated_large += astats->astats.allocated_large;
	sstats->astats.nmalloc_large += astats->astats.nmalloc_large;
	sstats->astats.ndalloc_large += astats->astats.ndalloc_large;
	sstats->astats.nrequests_large += astats->astats.nrequests_large;

	for (i = 0; i < nlclasses; i++) {
		sstats->lstats[i].nmalloc += astats->lstats[i].nmalloc;
		sstats->lstats[i].ndalloc += astats->lstats[i].ndalloc;
		sstats->lstats[i].nrequests += astats->lstats[i].nrequests;
		sstats->lstats[i].highruns += astats->lstats[i].highruns;
		sstats->lstats[i].curruns += astats->lstats[i].curruns;
	}

	for (i = 0; i < nbins; i++) {
		sstats->bstats[i].allocated += astats->bstats[i].allocated;
		sstats->bstats[i].nmalloc += astats->bstats[i].nmalloc;
		sstats->bstats[i].ndalloc += astats->bstats[i].ndalloc;
		sstats->bstats[i].nrequests += astats->bstats[i].nrequests;
#ifdef JEMALLOC_TCACHE
		sstats->bstats[i].nfills += astats->bstats[i].nfills;
		sstats->bstats[i].nflushes += astats->bstats[i].nflushes;
#endif
		sstats->bstats[i].nruns += astats->bstats[i].nruns;
		sstats->bstats[i].reruns += astats->bstats[i].reruns;
		sstats->bstats[i].highruns += astats->bstats[i].highruns;
		sstats->bstats[i].curruns += astats->bstats[i].curruns;
	}
}
#endif

static void
ctl_arena_refresh(arena_t *arena, unsigned i)
{
	ctl_arena_stats_t *astats = &ctl_stats.arenas[i];
	ctl_arena_stats_t *sstats = &ctl_stats.arenas[narenas];

	ctl_arena_clear(astats);

	sstats->nthreads += astats->nthreads;
#ifdef JEMALLOC_STATS
	ctl_arena_stats_amerge(astats, arena);
	/* Merge into sum stats as well. */
	ctl_arena_stats_smerge(sstats, astats);
#else
	astats->pactive += arena->nactive;
	astats->pdirty += arena->ndirty;
	/* Merge into sum stats as well. */
	sstats->pactive += arena->nactive;
	sstats->pdirty += arena->ndirty;
#endif
}

static void
ctl_refresh(void)
{
	unsigned i;
	arena_t *tarenas[narenas];

#ifdef JEMALLOC_STATS
	malloc_mutex_lock(&chunks_mtx);
	ctl_stats.chunks.current = stats_chunks.curchunks;
	ctl_stats.chunks.total = stats_chunks.nchunks;
	ctl_stats.chunks.high = stats_chunks.highchunks;
	malloc_mutex_unlock(&chunks_mtx);

	malloc_mutex_lock(&huge_mtx);
	ctl_stats.huge.allocated = huge_allocated;
	ctl_stats.huge.nmalloc = huge_nmalloc;
	ctl_stats.huge.ndalloc = huge_ndalloc;
	malloc_mutex_unlock(&huge_mtx);
#endif

	/*
	 * Clear sum stats, since they will be merged into by
	 * ctl_arena_refresh().
	 */
	ctl_stats.arenas[narenas].nthreads = 0;
	ctl_arena_clear(&ctl_stats.arenas[narenas]);

	malloc_mutex_lock(&arenas_lock);
	memcpy(tarenas, arenas, sizeof(arena_t *) * narenas);
	for (i = 0; i < narenas; i++) {
		if (arenas[i] != NULL)
			ctl_stats.arenas[i].nthreads = arenas[i]->nthreads;
		else
			ctl_stats.arenas[i].nthreads = 0;
	}
	malloc_mutex_unlock(&arenas_lock);
	for (i = 0; i < narenas; i++) {
		bool initialized = (tarenas[i] != NULL);

		ctl_stats.arenas[i].initialized = initialized;
		if (initialized)
			ctl_arena_refresh(tarenas[i], i);
	}

#ifdef JEMALLOC_STATS
	ctl_stats.allocated = ctl_stats.arenas[narenas].allocated_small
	    + ctl_stats.arenas[narenas].astats.allocated_large
	    + ctl_stats.huge.allocated;
	ctl_stats.active = (ctl_stats.arenas[narenas].pactive << PAGE_SHIFT)
	    + ctl_stats.huge.allocated;
	ctl_stats.mapped = (ctl_stats.chunks.current << opt_lg_chunk);

# ifdef JEMALLOC_SWAP
	malloc_mutex_lock(&swap_mtx);
	ctl_stats.swap_avail = swap_avail;
	malloc_mutex_unlock(&swap_mtx);
# endif
#endif

	ctl_epoch++;
}

static bool
ctl_init(void)
{
	bool ret;

	malloc_mutex_lock(&ctl_mtx);
	if (ctl_initialized == false) {
#ifdef JEMALLOC_STATS
		unsigned i;
#endif

		/*
		 * Allocate space for one extra arena stats element, which
		 * contains summed stats across all arenas.
		 */
		ctl_stats.arenas = (ctl_arena_stats_t *)base_alloc(
		    (narenas + 1) * sizeof(ctl_arena_stats_t));
		if (ctl_stats.arenas == NULL) {
			ret = true;
			goto RETURN;
		}
		memset(ctl_stats.arenas, 0, (narenas + 1) *
		    sizeof(ctl_arena_stats_t));

		/*
		 * Initialize all stats structures, regardless of whether they
		 * ever get used.  Lazy initialization would allow errors to
		 * cause inconsistent state to be viewable by the application.
		 */
#ifdef JEMALLOC_STATS
		for (i = 0; i <= narenas; i++) {
			if (ctl_arena_init(&ctl_stats.arenas[i])) {
				ret = true;
				goto RETURN;
			}
		}
#endif
		ctl_stats.arenas[narenas].initialized = true;

		ctl_epoch = 0;
		ctl_refresh();
		ctl_initialized = true;
	}

	ret = false;
RETURN:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}

static int
ctl_lookup(const char *name, ctl_node_t const **nodesp, size_t *mibp,
    size_t *depthp)
{
	int ret;
	const char *elm, *tdot, *dot;
	size_t elen, i, j;
	const ctl_node_t *node;

	elm = name;
	/* Equivalent to strchrnul(). */
	dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot : strchr(elm, '\0');
	elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	if (elen == 0) {
		ret = ENOENT;
		goto RETURN;
	}
	node = super_root_node;
	for (i = 0; i < *depthp; i++) {
		assert(node->named);
		assert(node->u.named.nchildren > 0);
		if (node->u.named.children[0].named) {
			const ctl_node_t *pnode = node;

			/* Children are named. */
			for (j = 0; j < node->u.named.nchildren; j++) {
				const ctl_node_t *child =
				    &node->u.named.children[j];
				if (strlen(child->u.named.name) == elen
				    && strncmp(elm, child->u.named.name,
				    elen) == 0) {
					node = child;
					if (nodesp != NULL)
						nodesp[i] = node;
					mibp[i] = j;
					break;
				}
			}
			if (node == pnode) {
				ret = ENOENT;
				goto RETURN;
			}
		} else {
			unsigned long index;
			const ctl_node_t *inode;

			/* Children are indexed. */
			index = strtoul(elm, NULL, 10);
			if (index == ULONG_MAX) {
				ret = ENOENT;
				goto RETURN;
			}

			inode = &node->u.named.children[0];
			node = inode->u.indexed.index(mibp, *depthp,
			    (size_t)index);
			if (node == NULL) {
				ret = ENOENT;
				goto RETURN;
			}

			if (nodesp != NULL)
				nodesp[i] = node;
			mibp[i] = (size_t)index;
		}

		if (node->ctl != NULL) {
			if (*dot != '\0') {
				/*
				 * The name contains more elements than are
				 * in this path through the tree.
				 */
				ret = ENOENT;
				goto RETURN;
			}
			/* Complete lookup successful. */
			*depthp = i + 1;
			break;
		}
		if (*dot == '\0') {
			/* No more elements. */
			ret = ENOENT;
			goto RETURN;
		}
		elm = &dot[1];
		dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot :
		    strchr(elm, '\0');
		elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	}

	ret = 0;
RETURN:
	return (ret);
}

int
ctl_byname(const char *name, void *oldp, size_t *oldlenp, void *newp,
    size_t newlen)
{
	int ret;
	size_t depth;
	ctl_node_t const *nodes[CTL_MAX_DEPTH];
	size_t mib[CTL_MAX_DEPTH];

	if (ctl_initialized == false && ctl_init()) {
		ret = EAGAIN;
		goto RETURN;
	}

	depth = CTL_MAX_DEPTH;
	ret = ctl_lookup(name, nodes, mib, &depth);
	if (ret != 0)
		goto RETURN;

	if (nodes[depth-1]->ctl == NULL) {
		/* The name refers to a partial path through the ctl tree. */
		ret = ENOENT;
		goto RETURN;
	}

	ret = nodes[depth-1]->ctl(mib, depth, oldp, oldlenp, newp, newlen);
RETURN:
	return (ret);
}

int
ctl_nametomib(const char *name, size_t *mibp, size_t *miblenp)
{
	int ret;

	if (ctl_initialized == false && ctl_init()) {
		ret = EAGAIN;
		goto RETURN;
	}

	ret = ctl_lookup(name, NULL, mibp, miblenp);
RETURN:
	return (ret);
}

int
ctl_bymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	const ctl_node_t *node;
	size_t i;

	if (ctl_initialized == false && ctl_init()) {
		ret = EAGAIN;
		goto RETURN;
	}

	/* Iterate down the tree. */
	node = super_root_node;
	for (i = 0; i < miblen; i++) {
		if (node->u.named.children[0].named) {
			/* Children are named. */
			if (node->u.named.nchildren <= mib[i]) {
				ret = ENOENT;
				goto RETURN;
			}
			node = &node->u.named.children[mib[i]];
		} else {
			const ctl_node_t *inode;

			/* Indexed element. */
			inode = &node->u.named.children[0];
			node = inode->u.indexed.index(mib, miblen, mib[i]);
			if (node == NULL) {
				ret = ENOENT;
				goto RETURN;
			}
		}
	}

	/* Call the ctl function. */
	if (node->ctl == NULL) {
		ret = ENOENT;
		goto RETURN;
	}
	ret = node->ctl(mib, miblen, oldp, oldlenp, newp, newlen);

RETURN:
	return (ret);
}
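
/*
 * Illustrative usage (not part of this file): a caller that polls the same
 * statistic can translate the name once and reuse the MIB, assuming the
 * public mallctlnametomib()/mallctlbymib() wrappers over these functions:
 *
 *	size_t mib[4], miblen = 4, len, pdirty;
 *	mallctlnametomib("stats.arenas.0.pdirty", mib, &miblen);
 *	len = sizeof(pdirty);
 *	mallctlbymib(mib, miblen, &pdirty, &len, NULL, 0);
 *	mib[2] = 1;	/* Rewrite the arena index to query another arena. */
 *	mallctlbymib(mib, miblen, &pdirty, &len, NULL, 0);
 */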

bool
ctl_boot(void)
{

	if (malloc_mutex_init(&ctl_mtx))
		return (true);

	ctl_initialized = false;

	return (false);
}

/******************************************************************************/
/* *_ctl() functions. */

#define READONLY() do { \
	if (newp != NULL || newlen != 0) { \
		ret = EPERM; \
		goto RETURN; \
	} \
} while (0)

#define WRITEONLY() do { \
	if (oldp != NULL || oldlenp != NULL) { \
		ret = EPERM; \
		goto RETURN; \
	} \
} while (0)

#define VOID() do { \
	READONLY(); \
	WRITEONLY(); \
} while (0)

#define READ(v, t) do { \
	if (oldp != NULL && oldlenp != NULL) { \
		if (*oldlenp != sizeof(t)) { \
			size_t copylen = (sizeof(t) <= *oldlenp) \
			    ? sizeof(t) : *oldlenp; \
			memcpy(oldp, (void *)&v, copylen); \
			ret = EINVAL; \
			goto RETURN; \
		} else \
			*(t *)oldp = v; \
	} \
} while (0)

#define WRITE(v, t) do { \
	if (newp != NULL) { \
		if (newlen != sizeof(t)) { \
			ret = EINVAL; \
			goto RETURN; \
		} \
		v = *(t *)newp; \
	} \
} while (0)

#define CTL_RO_GEN(n, v, t) \
static int \
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
    void *newp, size_t newlen) \
{ \
	int ret; \
	t oldval; \
\
	malloc_mutex_lock(&ctl_mtx); \
	READONLY(); \
	oldval = v; \
	READ(oldval, t); \
\
	ret = 0; \
RETURN: \
	malloc_mutex_unlock(&ctl_mtx); \
	return (ret); \
}

/*
 * ctl_mtx is not acquired, under the assumption that no pertinent data will
 * mutate during the call.
 */
#define CTL_RO_NL_GEN(n, v, t) \
static int \
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
    void *newp, size_t newlen) \
{ \
	int ret; \
	t oldval; \
\
	READONLY(); \
	oldval = v; \
	READ(oldval, t); \
\
	ret = 0; \
RETURN: \
	return (ret); \
}

#define CTL_RO_TRUE_GEN(n) \
static int \
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
    void *newp, size_t newlen) \
{ \
	int ret; \
	bool oldval; \
\
	READONLY(); \
	oldval = true; \
	READ(oldval, bool); \
\
	ret = 0; \
RETURN: \
	return (ret); \
}

#define CTL_RO_FALSE_GEN(n) \
static int \
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp, \
    void *newp, size_t newlen) \
{ \
	int ret; \
	bool oldval; \
\
	READONLY(); \
	oldval = false; \
	READ(oldval, bool); \
\
	ret = 0; \
RETURN: \
	return (ret); \
}
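
/*
 * Illustrative expansion (not in the original source):
 * CTL_RO_NL_GEN(opt_abort, opt_abort, bool) defines opt_abort_ctl(), a
 * read-only handler that rejects writes via READONLY() and copies the
 * current value of opt_abort out through READ(), without taking ctl_mtx.
 * CTL_RO_TRUE_GEN()/CTL_RO_FALSE_GEN() have the same shape with a constant
 * oldval.
 */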

CTL_RO_NL_GEN(version, JEMALLOC_VERSION, const char *)

static int
epoch_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	uint64_t newval;

	malloc_mutex_lock(&ctl_mtx);
	newval = 0;
	WRITE(newval, uint64_t);
	if (newval != 0)
		ctl_refresh();
	READ(ctl_epoch, uint64_t);

	ret = 0;
RETURN:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
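
/*
 * Illustrative usage (not part of this file): statistics are snapshotted
 * only when the epoch advances, so a consumer typically writes any nonzero
 * value to "epoch" before reading "stats.*":
 *
 *	uint64_t epoch = 1;
 *	size_t len = sizeof(epoch);
 *	mallctl("epoch", &epoch, &len, &epoch, sizeof(epoch));
 */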

#ifdef JEMALLOC_TCACHE
static int
tcache_flush_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	tcache_t *tcache;

	VOID();

	tcache = TCACHE_GET();
	if (tcache == NULL) {
		ret = 0;
		goto RETURN;
	}
	tcache_destroy(tcache);
	TCACHE_SET(NULL);

	ret = 0;
RETURN:
	return (ret);
}
#endif

static int
thread_arena_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	unsigned newind, oldind;

	newind = oldind = choose_arena()->ind;
	WRITE(newind, unsigned);
	READ(oldind, unsigned);
	if (newind != oldind) {
		arena_t *arena;

		if (newind >= narenas) {
			/* New arena index is out of range. */
			ret = EFAULT;
			goto RETURN;
		}

		/* Initialize arena if necessary. */
		malloc_mutex_lock(&arenas_lock);
		if ((arena = arenas[newind]) == NULL)
			arena = arenas_extend(newind);
		arenas[oldind]->nthreads--;
		arenas[newind]->nthreads++;
		malloc_mutex_unlock(&arenas_lock);
		if (arena == NULL) {
			ret = EAGAIN;
			goto RETURN;
		}

		/* Set new arena association. */
		ARENA_SET(arena);
#ifdef JEMALLOC_TCACHE
		{
			tcache_t *tcache = TCACHE_GET();
			if (tcache != NULL)
				tcache->arena = arena;
		}
#endif
	}

	ret = 0;
RETURN:
	return (ret);
}
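
/*
 * Illustrative usage (not part of this file): a thread can rebind itself to
 * arena 0 by writing to "thread.arena"; the previous index is returned
 * through oldp:
 *
 *	unsigned old, new = 0;
 *	size_t len = sizeof(old);
 *	mallctl("thread.arena", &old, &len, &new, sizeof(new));
 */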

#ifdef JEMALLOC_STATS
CTL_RO_NL_GEN(thread_allocated, ALLOCATED_GET(), uint64_t);
CTL_RO_NL_GEN(thread_allocatedp, ALLOCATEDP_GET(), uint64_t *);
CTL_RO_NL_GEN(thread_deallocated, DEALLOCATED_GET(), uint64_t);
CTL_RO_NL_GEN(thread_deallocatedp, DEALLOCATEDP_GET(), uint64_t *);
#endif

/******************************************************************************/

#ifdef JEMALLOC_DEBUG
CTL_RO_TRUE_GEN(config_debug)
#else
CTL_RO_FALSE_GEN(config_debug)
#endif

#ifdef JEMALLOC_DSS
CTL_RO_TRUE_GEN(config_dss)
#else
CTL_RO_FALSE_GEN(config_dss)
#endif

#ifdef JEMALLOC_DYNAMIC_PAGE_SHIFT
CTL_RO_TRUE_GEN(config_dynamic_page_shift)
#else
CTL_RO_FALSE_GEN(config_dynamic_page_shift)
#endif

#ifdef JEMALLOC_FILL
CTL_RO_TRUE_GEN(config_fill)
#else
CTL_RO_FALSE_GEN(config_fill)
#endif

#ifdef JEMALLOC_LAZY_LOCK
CTL_RO_TRUE_GEN(config_lazy_lock)
#else
CTL_RO_FALSE_GEN(config_lazy_lock)
#endif

#ifdef JEMALLOC_PROF
CTL_RO_TRUE_GEN(config_prof)
#else
CTL_RO_FALSE_GEN(config_prof)
#endif

#ifdef JEMALLOC_PROF_LIBGCC
CTL_RO_TRUE_GEN(config_prof_libgcc)
#else
CTL_RO_FALSE_GEN(config_prof_libgcc)
#endif

#ifdef JEMALLOC_PROF_LIBUNWIND
CTL_RO_TRUE_GEN(config_prof_libunwind)
#else
CTL_RO_FALSE_GEN(config_prof_libunwind)
#endif

#ifdef JEMALLOC_STATS
CTL_RO_TRUE_GEN(config_stats)
#else
CTL_RO_FALSE_GEN(config_stats)
#endif

#ifdef JEMALLOC_SWAP
CTL_RO_TRUE_GEN(config_swap)
#else
CTL_RO_FALSE_GEN(config_swap)
#endif

#ifdef JEMALLOC_SYSV
CTL_RO_TRUE_GEN(config_sysv)
#else
CTL_RO_FALSE_GEN(config_sysv)
#endif

#ifdef JEMALLOC_TCACHE
CTL_RO_TRUE_GEN(config_tcache)
#else
CTL_RO_FALSE_GEN(config_tcache)
#endif

#ifdef JEMALLOC_TINY
CTL_RO_TRUE_GEN(config_tiny)
#else
CTL_RO_FALSE_GEN(config_tiny)
#endif

#ifdef JEMALLOC_TLS
CTL_RO_TRUE_GEN(config_tls)
#else
CTL_RO_FALSE_GEN(config_tls)
#endif

#ifdef JEMALLOC_XMALLOC
CTL_RO_TRUE_GEN(config_xmalloc)
#else
CTL_RO_FALSE_GEN(config_xmalloc)
#endif

/******************************************************************************/

CTL_RO_NL_GEN(opt_abort, opt_abort, bool)
CTL_RO_NL_GEN(opt_lg_qspace_max, opt_lg_qspace_max, size_t)
CTL_RO_NL_GEN(opt_lg_cspace_max, opt_lg_cspace_max, size_t)
CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
CTL_RO_NL_GEN(opt_narenas, opt_narenas, size_t)
CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool)
#ifdef JEMALLOC_FILL
CTL_RO_NL_GEN(opt_junk, opt_junk, bool)
CTL_RO_NL_GEN(opt_zero, opt_zero, bool)
#endif
#ifdef JEMALLOC_SYSV
CTL_RO_NL_GEN(opt_sysv, opt_sysv, bool)
#endif
#ifdef JEMALLOC_XMALLOC
CTL_RO_NL_GEN(opt_xmalloc, opt_xmalloc, bool)
#endif
#ifdef JEMALLOC_TCACHE
CTL_RO_NL_GEN(opt_tcache, opt_tcache, bool)
CTL_RO_NL_GEN(opt_lg_tcache_gc_sweep, opt_lg_tcache_gc_sweep, ssize_t)
#endif
#ifdef JEMALLOC_PROF
CTL_RO_NL_GEN(opt_prof, opt_prof, bool)
CTL_RO_NL_GEN(opt_prof_prefix, opt_prof_prefix, const char *)
CTL_RO_GEN(opt_prof_active, opt_prof_active, bool) /* Mutable. */
CTL_RO_NL_GEN(opt_lg_prof_bt_max, opt_lg_prof_bt_max, size_t)
CTL_RO_NL_GEN(opt_lg_prof_sample, opt_lg_prof_sample, size_t)
CTL_RO_NL_GEN(opt_lg_prof_interval, opt_lg_prof_interval, ssize_t)
CTL_RO_NL_GEN(opt_prof_gdump, opt_prof_gdump, bool)
CTL_RO_NL_GEN(opt_prof_leak, opt_prof_leak, bool)
CTL_RO_NL_GEN(opt_prof_accum, opt_prof_accum, bool)
CTL_RO_NL_GEN(opt_lg_prof_tcmax, opt_lg_prof_tcmax, ssize_t)
#endif
#ifdef JEMALLOC_SWAP
CTL_RO_NL_GEN(opt_overcommit, opt_overcommit, bool)
#endif

/******************************************************************************/

CTL_RO_NL_GEN(arenas_bin_i_size, arena_bin_info[mib[2]].reg_size, size_t)
CTL_RO_NL_GEN(arenas_bin_i_nregs, arena_bin_info[mib[2]].nregs, uint32_t)
CTL_RO_NL_GEN(arenas_bin_i_run_size, arena_bin_info[mib[2]].run_size, size_t)
const ctl_node_t *
arenas_bin_i_index(const size_t *mib, size_t miblen, size_t i)
{

	if (i > nbins)
		return (NULL);
	return (super_arenas_bin_i_node);
}

CTL_RO_NL_GEN(arenas_lrun_i_size, ((mib[2]+1) << PAGE_SHIFT), size_t)
const ctl_node_t *
arenas_lrun_i_index(const size_t *mib, size_t miblen, size_t i)
{

	if (i > nlclasses)
		return (NULL);
	return (super_arenas_lrun_i_node);
}

CTL_RO_NL_GEN(arenas_narenas, narenas, unsigned)

static int
arenas_initialized_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned nread, i;

	malloc_mutex_lock(&ctl_mtx);
	READONLY();
	if (*oldlenp != narenas * sizeof(bool)) {
		ret = EINVAL;
		nread = (*oldlenp < narenas * sizeof(bool))
		    ? (*oldlenp / sizeof(bool)) : narenas;
	} else {
		ret = 0;
		nread = narenas;
	}

	for (i = 0; i < nread; i++)
		((bool *)oldp)[i] = ctl_stats.arenas[i].initialized;

RETURN:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
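
/*
 * Illustrative usage (not part of this file): "arenas.initialized" fills a
 * caller-supplied bool array with one element per arena:
 *
 *	unsigned narenas;
 *	size_t len = sizeof(narenas);
 *	mallctl("arenas.narenas", &narenas, &len, NULL, 0);
 *	bool initialized[narenas];
 *	len = narenas * sizeof(bool);
 *	mallctl("arenas.initialized", initialized, &len, NULL, 0);
 */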

CTL_RO_NL_GEN(arenas_quantum, QUANTUM, size_t)
CTL_RO_NL_GEN(arenas_cacheline, CACHELINE, size_t)
CTL_RO_NL_GEN(arenas_subpage, SUBPAGE, size_t)
CTL_RO_NL_GEN(arenas_pagesize, PAGE_SIZE, size_t)
CTL_RO_NL_GEN(arenas_chunksize, chunksize, size_t)
#ifdef JEMALLOC_TINY
CTL_RO_NL_GEN(arenas_tspace_min, (1U << LG_TINY_MIN), size_t)
CTL_RO_NL_GEN(arenas_tspace_max, (qspace_min >> 1), size_t)
#endif
CTL_RO_NL_GEN(arenas_qspace_min, qspace_min, size_t)
CTL_RO_NL_GEN(arenas_qspace_max, qspace_max, size_t)
CTL_RO_NL_GEN(arenas_cspace_min, cspace_min, size_t)
CTL_RO_NL_GEN(arenas_cspace_max, cspace_max, size_t)
CTL_RO_NL_GEN(arenas_sspace_min, sspace_min, size_t)
CTL_RO_NL_GEN(arenas_sspace_max, sspace_max, size_t)
#ifdef JEMALLOC_TCACHE
CTL_RO_NL_GEN(arenas_tcache_max, tcache_maxclass, size_t)
#endif
CTL_RO_NL_GEN(arenas_ntbins, ntbins, unsigned)
CTL_RO_NL_GEN(arenas_nqbins, nqbins, unsigned)
CTL_RO_NL_GEN(arenas_ncbins, ncbins, unsigned)
CTL_RO_NL_GEN(arenas_nsbins, nsbins, unsigned)
CTL_RO_NL_GEN(arenas_nbins, nbins, unsigned)
#ifdef JEMALLOC_TCACHE
CTL_RO_NL_GEN(arenas_nhbins, nhbins, unsigned)
#endif
CTL_RO_NL_GEN(arenas_nlruns, nlclasses, size_t)

static int
arenas_purge_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	unsigned arena;

	WRITEONLY();
	arena = UINT_MAX;
	WRITE(arena, unsigned);
	if (newp != NULL && arena >= narenas) {
		ret = EFAULT;
		goto RETURN;
	} else {
		arena_t *tarenas[narenas];

		malloc_mutex_lock(&arenas_lock);
		memcpy(tarenas, arenas, sizeof(arena_t *) * narenas);
		malloc_mutex_unlock(&arenas_lock);

		if (arena == UINT_MAX) {
			unsigned i;
			for (i = 0; i < narenas; i++) {
				if (tarenas[i] != NULL)
					arena_purge_all(tarenas[i]);
			}
		} else {
			assert(arena < narenas);
			if (tarenas[arena] != NULL)
				arena_purge_all(tarenas[arena]);
		}
	}

	ret = 0;
RETURN:
	return (ret);
}
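
/*
 * Illustrative usage (not part of this file): writing UINT_MAX purges dirty
 * pages in every arena, while writing a valid index purges just that arena:
 *
 *	unsigned arena = 0;
 *	mallctl("arenas.purge", NULL, NULL, &arena, sizeof(arena));
 */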

/******************************************************************************/

#ifdef JEMALLOC_PROF
static int
prof_active_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	malloc_mutex_lock(&ctl_mtx); /* Protect opt_prof_active. */
	oldval = opt_prof_active;
	if (newp != NULL) {
		/*
		 * The memory barriers will tend to make opt_prof_active
		 * propagate faster on systems with weak memory ordering.
		 */
		mb_write();
		WRITE(opt_prof_active, bool);
		mb_write();
	}
	READ(oldval, bool);

	ret = 0;
RETURN:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}

static int
prof_dump_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	const char *filename = NULL;

	WRITEONLY();
	WRITE(filename, const char *);

	if (prof_mdump(filename)) {
		ret = EFAULT;
		goto RETURN;
	}

	ret = 0;
RETURN:
	return (ret);
}

CTL_RO_NL_GEN(prof_interval, prof_interval, uint64_t)
#endif
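
/*
 * Illustrative usage (not part of this file): writing a filename pointer
 * (NULL selects an automatically generated name) triggers a profile dump:
 *
 *	const char *filename = NULL;
 *	mallctl("prof.dump", NULL, NULL, &filename, sizeof(filename));
 */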

/******************************************************************************/

#ifdef JEMALLOC_STATS
CTL_RO_GEN(stats_chunks_current, ctl_stats.chunks.current, size_t)
CTL_RO_GEN(stats_chunks_total, ctl_stats.chunks.total, uint64_t)
CTL_RO_GEN(stats_chunks_high, ctl_stats.chunks.high, size_t)
CTL_RO_GEN(stats_huge_allocated, huge_allocated, size_t)
CTL_RO_GEN(stats_huge_nmalloc, huge_nmalloc, uint64_t)
CTL_RO_GEN(stats_huge_ndalloc, huge_ndalloc, uint64_t)
CTL_RO_GEN(stats_arenas_i_small_allocated,
    ctl_stats.arenas[mib[2]].allocated_small, size_t)
CTL_RO_GEN(stats_arenas_i_small_nmalloc,
    ctl_stats.arenas[mib[2]].nmalloc_small, uint64_t)
CTL_RO_GEN(stats_arenas_i_small_ndalloc,
    ctl_stats.arenas[mib[2]].ndalloc_small, uint64_t)
CTL_RO_GEN(stats_arenas_i_small_nrequests,
    ctl_stats.arenas[mib[2]].nrequests_small, uint64_t)
CTL_RO_GEN(stats_arenas_i_large_allocated,
    ctl_stats.arenas[mib[2]].astats.allocated_large, size_t)
CTL_RO_GEN(stats_arenas_i_large_nmalloc,
    ctl_stats.arenas[mib[2]].astats.nmalloc_large, uint64_t)
CTL_RO_GEN(stats_arenas_i_large_ndalloc,
    ctl_stats.arenas[mib[2]].astats.ndalloc_large, uint64_t)
CTL_RO_GEN(stats_arenas_i_large_nrequests,
    ctl_stats.arenas[mib[2]].astats.nrequests_large, uint64_t)

CTL_RO_GEN(stats_arenas_i_bins_j_allocated,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].allocated, size_t)
CTL_RO_GEN(stats_arenas_i_bins_j_nmalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nmalloc, uint64_t)
CTL_RO_GEN(stats_arenas_i_bins_j_ndalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].ndalloc, uint64_t)
CTL_RO_GEN(stats_arenas_i_bins_j_nrequests,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nrequests, uint64_t)
#ifdef JEMALLOC_TCACHE
CTL_RO_GEN(stats_arenas_i_bins_j_nfills,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nfills, uint64_t)
CTL_RO_GEN(stats_arenas_i_bins_j_nflushes,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nflushes, uint64_t)
#endif
CTL_RO_GEN(stats_arenas_i_bins_j_nruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nruns, uint64_t)
CTL_RO_GEN(stats_arenas_i_bins_j_nreruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].reruns, uint64_t)
CTL_RO_GEN(stats_arenas_i_bins_j_highruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].highruns, size_t)
CTL_RO_GEN(stats_arenas_i_bins_j_curruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].curruns, size_t)

const ctl_node_t *
stats_arenas_i_bins_j_index(const size_t *mib, size_t miblen, size_t j)
{

	if (j > nbins)
		return (NULL);
	return (super_stats_arenas_i_bins_j_node);
}

CTL_RO_GEN(stats_arenas_i_lruns_j_nmalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nmalloc, uint64_t)
CTL_RO_GEN(stats_arenas_i_lruns_j_ndalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].ndalloc, uint64_t)
CTL_RO_GEN(stats_arenas_i_lruns_j_nrequests,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nrequests, uint64_t)
CTL_RO_GEN(stats_arenas_i_lruns_j_curruns,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].curruns, size_t)
CTL_RO_GEN(stats_arenas_i_lruns_j_highruns,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].highruns, size_t)

const ctl_node_t *
stats_arenas_i_lruns_j_index(const size_t *mib, size_t miblen, size_t j)
{

	if (j > nlclasses)
		return (NULL);
	return (super_stats_arenas_i_lruns_j_node);
}
#endif

CTL_RO_GEN(stats_arenas_i_nthreads, ctl_stats.arenas[mib[2]].nthreads, unsigned)
CTL_RO_GEN(stats_arenas_i_pactive, ctl_stats.arenas[mib[2]].pactive, size_t)
CTL_RO_GEN(stats_arenas_i_pdirty, ctl_stats.arenas[mib[2]].pdirty, size_t)
#ifdef JEMALLOC_STATS
CTL_RO_GEN(stats_arenas_i_mapped, ctl_stats.arenas[mib[2]].astats.mapped,
    size_t)
CTL_RO_GEN(stats_arenas_i_npurge, ctl_stats.arenas[mib[2]].astats.npurge,
    uint64_t)
CTL_RO_GEN(stats_arenas_i_nmadvise, ctl_stats.arenas[mib[2]].astats.nmadvise,
    uint64_t)
CTL_RO_GEN(stats_arenas_i_purged, ctl_stats.arenas[mib[2]].astats.purged,
    uint64_t)
#endif

const ctl_node_t *
stats_arenas_i_index(const size_t *mib, size_t miblen, size_t i)
{
	const ctl_node_t *ret;

	malloc_mutex_lock(&ctl_mtx);
	if (ctl_stats.arenas[i].initialized == false) {
		ret = NULL;
		goto RETURN;
	}

	ret = super_stats_arenas_i_node;
RETURN:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}

#ifdef JEMALLOC_STATS
CTL_RO_GEN(stats_cactive, &stats_cactive, size_t *)
CTL_RO_GEN(stats_allocated, ctl_stats.allocated, size_t)
CTL_RO_GEN(stats_active, ctl_stats.active, size_t)
CTL_RO_GEN(stats_mapped, ctl_stats.mapped, size_t)
#endif

/******************************************************************************/

#ifdef JEMALLOC_SWAP
# ifdef JEMALLOC_STATS
CTL_RO_GEN(swap_avail, ctl_stats.swap_avail, size_t)
# endif

static int
swap_prezeroed_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	malloc_mutex_lock(&ctl_mtx);
	if (swap_enabled) {
		READONLY();
	} else {
		/*
		 * swap_prezeroed isn't actually used by the swap code until it
		 * is set during a successful chunk_swap_enabled() call.  We
		 * use it here to store the value that we'll pass to
		 * chunk_swap_enable() in a swap.fds mallctl().  This is not
		 * very clean, but the obvious alternatives are even worse.
		 */
		WRITE(swap_prezeroed, bool);
	}

	READ(swap_prezeroed, bool);

	ret = 0;
RETURN:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}

CTL_RO_GEN(swap_nfds, swap_nfds, size_t)

static int
swap_fds_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;

	malloc_mutex_lock(&ctl_mtx);
	if (swap_enabled) {
		READONLY();
	} else if (newp != NULL) {
		size_t nfds = newlen / sizeof(int);

		{
			int fds[nfds];

			memcpy(fds, newp, nfds * sizeof(int));
			if (chunk_swap_enable(fds, nfds, swap_prezeroed)) {
				ret = EFAULT;
				goto RETURN;
			}
		}
	}

	if (oldp != NULL && oldlenp != NULL) {
		if (*oldlenp != swap_nfds * sizeof(int)) {
			size_t copylen = (swap_nfds * sizeof(int) <= *oldlenp)
			    ? swap_nfds * sizeof(int) : *oldlenp;

			memcpy(oldp, swap_fds, copylen);
			ret = EINVAL;
			goto RETURN;
		} else
			memcpy(oldp, swap_fds, *oldlenp);
	}

	ret = 0;
RETURN:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
#endif
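
/*
 * Illustrative usage (not part of this file), following the comment in
 * swap_prezeroed_ctl() above: record whether the files are prezeroed, then
 * hand their descriptors to the allocator (swap_fd is a hypothetical open
 * file descriptor):
 *
 *	bool prezeroed = false;
 *	mallctl("swap.prezeroed", NULL, NULL, &prezeroed, sizeof(prezeroed));
 *	int fds[1] = { swap_fd };
 *	mallctl("swap.fds", NULL, NULL, fds, sizeof(fds));
 */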