xref: /freebsd/contrib/jemalloc/src/ctl.c (revision cbd30a72ca196976c1c700400ecd424baa1b9c16)
1 #define	JEMALLOC_CTL_C_
2 #include "jemalloc/internal/jemalloc_internal.h"
3 
4 /******************************************************************************/
5 /* Data. */
6 
7 /*
8  * ctl_mtx protects the following:
9  * - ctl_stats.*
10  */
11 static malloc_mutex_t	ctl_mtx;
12 static bool		ctl_initialized;
13 static uint64_t		ctl_epoch;
14 static ctl_stats_t	ctl_stats;
15 
16 /******************************************************************************/
17 /* Helpers for named and indexed nodes. */
18 
19 JEMALLOC_INLINE_C const ctl_named_node_t *
20 ctl_named_node(const ctl_node_t *node)
21 {
22 
23 	return ((node->named) ? (const ctl_named_node_t *)node : NULL);
24 }
25 
26 JEMALLOC_INLINE_C const ctl_named_node_t *
27 ctl_named_children(const ctl_named_node_t *node, size_t index)
28 {
29 	const ctl_named_node_t *children = ctl_named_node(node->children);
30 
31 	return (children ? &children[index] : NULL);
32 }
33 
34 JEMALLOC_INLINE_C const ctl_indexed_node_t *
35 ctl_indexed_node(const ctl_node_t *node)
36 {
37 
38 	return (!node->named ? (const ctl_indexed_node_t *)node : NULL);
39 }
40 
41 /******************************************************************************/
42 /* Function prototypes for non-inline static functions. */
43 
44 #define	CTL_PROTO(n)							\
45 static int	n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,	\
46     void *oldp, size_t *oldlenp, void *newp, size_t newlen);
47 
48 #define	INDEX_PROTO(n)							\
49 static const ctl_named_node_t	*n##_index(tsdn_t *tsdn,		\
50     const size_t *mib, size_t miblen, size_t i);
51 
52 static bool	ctl_arena_init(ctl_arena_stats_t *astats);
53 static void	ctl_arena_clear(ctl_arena_stats_t *astats);
54 static void	ctl_arena_stats_amerge(tsdn_t *tsdn, ctl_arena_stats_t *cstats,
55     arena_t *arena);
56 static void	ctl_arena_stats_smerge(ctl_arena_stats_t *sstats,
57     ctl_arena_stats_t *astats);
58 static void	ctl_arena_refresh(tsdn_t *tsdn, arena_t *arena, unsigned i);
59 static bool	ctl_grow(tsdn_t *tsdn);
60 static void	ctl_refresh(tsdn_t *tsdn);
61 static bool	ctl_init(tsdn_t *tsdn);
62 static int	ctl_lookup(tsdn_t *tsdn, const char *name,
63     ctl_node_t const **nodesp, size_t *mibp, size_t *depthp);
64 
65 CTL_PROTO(version)
66 CTL_PROTO(epoch)
67 CTL_PROTO(thread_tcache_enabled)
68 CTL_PROTO(thread_tcache_flush)
69 CTL_PROTO(thread_prof_name)
70 CTL_PROTO(thread_prof_active)
71 CTL_PROTO(thread_arena)
72 CTL_PROTO(thread_allocated)
73 CTL_PROTO(thread_allocatedp)
74 CTL_PROTO(thread_deallocated)
75 CTL_PROTO(thread_deallocatedp)
76 CTL_PROTO(config_cache_oblivious)
77 CTL_PROTO(config_debug)
78 CTL_PROTO(config_fill)
79 CTL_PROTO(config_lazy_lock)
80 CTL_PROTO(config_malloc_conf)
81 CTL_PROTO(config_munmap)
82 CTL_PROTO(config_prof)
83 CTL_PROTO(config_prof_libgcc)
84 CTL_PROTO(config_prof_libunwind)
85 CTL_PROTO(config_stats)
86 CTL_PROTO(config_tcache)
87 CTL_PROTO(config_thp)
88 CTL_PROTO(config_tls)
89 CTL_PROTO(config_utrace)
90 CTL_PROTO(config_valgrind)
91 CTL_PROTO(config_xmalloc)
92 CTL_PROTO(opt_abort)
93 CTL_PROTO(opt_dss)
94 CTL_PROTO(opt_lg_chunk)
95 CTL_PROTO(opt_narenas)
96 CTL_PROTO(opt_purge)
97 CTL_PROTO(opt_lg_dirty_mult)
98 CTL_PROTO(opt_decay_time)
99 CTL_PROTO(opt_stats_print)
100 CTL_PROTO(opt_junk)
101 CTL_PROTO(opt_zero)
102 CTL_PROTO(opt_quarantine)
103 CTL_PROTO(opt_redzone)
104 CTL_PROTO(opt_utrace)
105 CTL_PROTO(opt_xmalloc)
106 CTL_PROTO(opt_tcache)
107 CTL_PROTO(opt_lg_tcache_max)
108 CTL_PROTO(opt_thp)
109 CTL_PROTO(opt_prof)
110 CTL_PROTO(opt_prof_prefix)
111 CTL_PROTO(opt_prof_active)
112 CTL_PROTO(opt_prof_thread_active_init)
113 CTL_PROTO(opt_lg_prof_sample)
114 CTL_PROTO(opt_lg_prof_interval)
115 CTL_PROTO(opt_prof_gdump)
116 CTL_PROTO(opt_prof_final)
117 CTL_PROTO(opt_prof_leak)
118 CTL_PROTO(opt_prof_accum)
119 CTL_PROTO(tcache_create)
120 CTL_PROTO(tcache_flush)
121 CTL_PROTO(tcache_destroy)
122 static void	arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all);
123 CTL_PROTO(arena_i_purge)
124 CTL_PROTO(arena_i_decay)
125 CTL_PROTO(arena_i_reset)
126 CTL_PROTO(arena_i_dss)
127 CTL_PROTO(arena_i_lg_dirty_mult)
128 CTL_PROTO(arena_i_decay_time)
129 CTL_PROTO(arena_i_chunk_hooks)
130 INDEX_PROTO(arena_i)
131 CTL_PROTO(arenas_bin_i_size)
132 CTL_PROTO(arenas_bin_i_nregs)
133 CTL_PROTO(arenas_bin_i_run_size)
134 INDEX_PROTO(arenas_bin_i)
135 CTL_PROTO(arenas_lrun_i_size)
136 INDEX_PROTO(arenas_lrun_i)
137 CTL_PROTO(arenas_hchunk_i_size)
138 INDEX_PROTO(arenas_hchunk_i)
139 CTL_PROTO(arenas_narenas)
140 CTL_PROTO(arenas_initialized)
141 CTL_PROTO(arenas_lg_dirty_mult)
142 CTL_PROTO(arenas_decay_time)
143 CTL_PROTO(arenas_quantum)
144 CTL_PROTO(arenas_page)
145 CTL_PROTO(arenas_tcache_max)
146 CTL_PROTO(arenas_nbins)
147 CTL_PROTO(arenas_nhbins)
148 CTL_PROTO(arenas_nlruns)
149 CTL_PROTO(arenas_nhchunks)
150 CTL_PROTO(arenas_extend)
151 CTL_PROTO(prof_thread_active_init)
152 CTL_PROTO(prof_active)
153 CTL_PROTO(prof_dump)
154 CTL_PROTO(prof_gdump)
155 CTL_PROTO(prof_reset)
156 CTL_PROTO(prof_interval)
157 CTL_PROTO(lg_prof_sample)
158 CTL_PROTO(stats_arenas_i_small_allocated)
159 CTL_PROTO(stats_arenas_i_small_nmalloc)
160 CTL_PROTO(stats_arenas_i_small_ndalloc)
161 CTL_PROTO(stats_arenas_i_small_nrequests)
162 CTL_PROTO(stats_arenas_i_large_allocated)
163 CTL_PROTO(stats_arenas_i_large_nmalloc)
164 CTL_PROTO(stats_arenas_i_large_ndalloc)
165 CTL_PROTO(stats_arenas_i_large_nrequests)
166 CTL_PROTO(stats_arenas_i_huge_allocated)
167 CTL_PROTO(stats_arenas_i_huge_nmalloc)
168 CTL_PROTO(stats_arenas_i_huge_ndalloc)
169 CTL_PROTO(stats_arenas_i_huge_nrequests)
170 CTL_PROTO(stats_arenas_i_bins_j_nmalloc)
171 CTL_PROTO(stats_arenas_i_bins_j_ndalloc)
172 CTL_PROTO(stats_arenas_i_bins_j_nrequests)
173 CTL_PROTO(stats_arenas_i_bins_j_curregs)
174 CTL_PROTO(stats_arenas_i_bins_j_nfills)
175 CTL_PROTO(stats_arenas_i_bins_j_nflushes)
176 CTL_PROTO(stats_arenas_i_bins_j_nruns)
177 CTL_PROTO(stats_arenas_i_bins_j_nreruns)
178 CTL_PROTO(stats_arenas_i_bins_j_curruns)
179 INDEX_PROTO(stats_arenas_i_bins_j)
180 CTL_PROTO(stats_arenas_i_lruns_j_nmalloc)
181 CTL_PROTO(stats_arenas_i_lruns_j_ndalloc)
182 CTL_PROTO(stats_arenas_i_lruns_j_nrequests)
183 CTL_PROTO(stats_arenas_i_lruns_j_curruns)
184 INDEX_PROTO(stats_arenas_i_lruns_j)
185 CTL_PROTO(stats_arenas_i_hchunks_j_nmalloc)
186 CTL_PROTO(stats_arenas_i_hchunks_j_ndalloc)
187 CTL_PROTO(stats_arenas_i_hchunks_j_nrequests)
188 CTL_PROTO(stats_arenas_i_hchunks_j_curhchunks)
189 INDEX_PROTO(stats_arenas_i_hchunks_j)
190 CTL_PROTO(stats_arenas_i_nthreads)
191 CTL_PROTO(stats_arenas_i_dss)
192 CTL_PROTO(stats_arenas_i_lg_dirty_mult)
193 CTL_PROTO(stats_arenas_i_decay_time)
194 CTL_PROTO(stats_arenas_i_pactive)
195 CTL_PROTO(stats_arenas_i_pdirty)
196 CTL_PROTO(stats_arenas_i_mapped)
197 CTL_PROTO(stats_arenas_i_retained)
198 CTL_PROTO(stats_arenas_i_npurge)
199 CTL_PROTO(stats_arenas_i_nmadvise)
200 CTL_PROTO(stats_arenas_i_purged)
201 CTL_PROTO(stats_arenas_i_metadata_mapped)
202 CTL_PROTO(stats_arenas_i_metadata_allocated)
203 INDEX_PROTO(stats_arenas_i)
204 CTL_PROTO(stats_cactive)
205 CTL_PROTO(stats_allocated)
206 CTL_PROTO(stats_active)
207 CTL_PROTO(stats_metadata)
208 CTL_PROTO(stats_resident)
209 CTL_PROTO(stats_mapped)
210 CTL_PROTO(stats_retained)
211 
212 /******************************************************************************/
213 /* mallctl tree. */
214 
/* Maximum tree depth. */
#define	CTL_MAX_DEPTH	6

/*
 * Constructor macros for the static mallctl tree below.
 *
 * NAME(n):      initializes the embedded ctl_node_t as a named node
 *               ({true}) followed by the node's name string n.
 * CHILD(t, c):  interior node: child count (computed from the size of
 *               c##_node, with t selecting named vs. indexed element
 *               type), pointer to the children array, and no handler.
 * CTL(c):       terminal node: no children, handler function c##_ctl.
 */
#define	NAME(n)	{true},	n
#define	CHILD(t, c)							\
	sizeof(c##_node) / sizeof(ctl_##t##_node_t),			\
	(ctl_node_t *)c##_node,						\
	NULL
#define	CTL(c)	0, NULL, c##_ctl

/*
 * Only handles internal indexed nodes, since there are currently no external
 * ones.
 */
#define	INDEX(i)	{false},	i##_index
230 
231 static const ctl_named_node_t	thread_tcache_node[] = {
232 	{NAME("enabled"),	CTL(thread_tcache_enabled)},
233 	{NAME("flush"),		CTL(thread_tcache_flush)}
234 };
235 
236 static const ctl_named_node_t	thread_prof_node[] = {
237 	{NAME("name"),		CTL(thread_prof_name)},
238 	{NAME("active"),	CTL(thread_prof_active)}
239 };
240 
241 static const ctl_named_node_t	thread_node[] = {
242 	{NAME("arena"),		CTL(thread_arena)},
243 	{NAME("allocated"),	CTL(thread_allocated)},
244 	{NAME("allocatedp"),	CTL(thread_allocatedp)},
245 	{NAME("deallocated"),	CTL(thread_deallocated)},
246 	{NAME("deallocatedp"),	CTL(thread_deallocatedp)},
247 	{NAME("tcache"),	CHILD(named, thread_tcache)},
248 	{NAME("prof"),		CHILD(named, thread_prof)}
249 };
250 
251 static const ctl_named_node_t	config_node[] = {
252 	{NAME("cache_oblivious"), CTL(config_cache_oblivious)},
253 	{NAME("debug"),		CTL(config_debug)},
254 	{NAME("fill"),		CTL(config_fill)},
255 	{NAME("lazy_lock"),	CTL(config_lazy_lock)},
256 	{NAME("malloc_conf"),	CTL(config_malloc_conf)},
257 	{NAME("munmap"),	CTL(config_munmap)},
258 	{NAME("prof"),		CTL(config_prof)},
259 	{NAME("prof_libgcc"),	CTL(config_prof_libgcc)},
260 	{NAME("prof_libunwind"), CTL(config_prof_libunwind)},
261 	{NAME("stats"),		CTL(config_stats)},
262 	{NAME("tcache"),	CTL(config_tcache)},
263 	{NAME("thp"),		CTL(config_thp)},
264 	{NAME("tls"),		CTL(config_tls)},
265 	{NAME("utrace"),	CTL(config_utrace)},
266 	{NAME("valgrind"),	CTL(config_valgrind)},
267 	{NAME("xmalloc"),	CTL(config_xmalloc)}
268 };
269 
270 static const ctl_named_node_t opt_node[] = {
271 	{NAME("abort"),		CTL(opt_abort)},
272 	{NAME("dss"),		CTL(opt_dss)},
273 	{NAME("lg_chunk"),	CTL(opt_lg_chunk)},
274 	{NAME("narenas"),	CTL(opt_narenas)},
275 	{NAME("purge"),		CTL(opt_purge)},
276 	{NAME("lg_dirty_mult"),	CTL(opt_lg_dirty_mult)},
277 	{NAME("decay_time"),	CTL(opt_decay_time)},
278 	{NAME("stats_print"),	CTL(opt_stats_print)},
279 	{NAME("junk"),		CTL(opt_junk)},
280 	{NAME("zero"),		CTL(opt_zero)},
281 	{NAME("quarantine"),	CTL(opt_quarantine)},
282 	{NAME("redzone"),	CTL(opt_redzone)},
283 	{NAME("utrace"),	CTL(opt_utrace)},
284 	{NAME("xmalloc"),	CTL(opt_xmalloc)},
285 	{NAME("tcache"),	CTL(opt_tcache)},
286 	{NAME("lg_tcache_max"),	CTL(opt_lg_tcache_max)},
287 	{NAME("thp"),		CTL(opt_thp)},
288 	{NAME("prof"),		CTL(opt_prof)},
289 	{NAME("prof_prefix"),	CTL(opt_prof_prefix)},
290 	{NAME("prof_active"),	CTL(opt_prof_active)},
291 	{NAME("prof_thread_active_init"), CTL(opt_prof_thread_active_init)},
292 	{NAME("lg_prof_sample"), CTL(opt_lg_prof_sample)},
293 	{NAME("lg_prof_interval"), CTL(opt_lg_prof_interval)},
294 	{NAME("prof_gdump"),	CTL(opt_prof_gdump)},
295 	{NAME("prof_final"),	CTL(opt_prof_final)},
296 	{NAME("prof_leak"),	CTL(opt_prof_leak)},
297 	{NAME("prof_accum"),	CTL(opt_prof_accum)}
298 };
299 
300 static const ctl_named_node_t	tcache_node[] = {
301 	{NAME("create"),	CTL(tcache_create)},
302 	{NAME("flush"),		CTL(tcache_flush)},
303 	{NAME("destroy"),	CTL(tcache_destroy)}
304 };
305 
306 static const ctl_named_node_t arena_i_node[] = {
307 	{NAME("purge"),		CTL(arena_i_purge)},
308 	{NAME("decay"),		CTL(arena_i_decay)},
309 	{NAME("reset"),		CTL(arena_i_reset)},
310 	{NAME("dss"),		CTL(arena_i_dss)},
311 	{NAME("lg_dirty_mult"),	CTL(arena_i_lg_dirty_mult)},
312 	{NAME("decay_time"),	CTL(arena_i_decay_time)},
313 	{NAME("chunk_hooks"),	CTL(arena_i_chunk_hooks)}
314 };
315 static const ctl_named_node_t super_arena_i_node[] = {
316 	{NAME(""),		CHILD(named, arena_i)}
317 };
318 
319 static const ctl_indexed_node_t arena_node[] = {
320 	{INDEX(arena_i)}
321 };
322 
323 static const ctl_named_node_t arenas_bin_i_node[] = {
324 	{NAME("size"),		CTL(arenas_bin_i_size)},
325 	{NAME("nregs"),		CTL(arenas_bin_i_nregs)},
326 	{NAME("run_size"),	CTL(arenas_bin_i_run_size)}
327 };
328 static const ctl_named_node_t super_arenas_bin_i_node[] = {
329 	{NAME(""),		CHILD(named, arenas_bin_i)}
330 };
331 
332 static const ctl_indexed_node_t arenas_bin_node[] = {
333 	{INDEX(arenas_bin_i)}
334 };
335 
336 static const ctl_named_node_t arenas_lrun_i_node[] = {
337 	{NAME("size"),		CTL(arenas_lrun_i_size)}
338 };
339 static const ctl_named_node_t super_arenas_lrun_i_node[] = {
340 	{NAME(""),		CHILD(named, arenas_lrun_i)}
341 };
342 
343 static const ctl_indexed_node_t arenas_lrun_node[] = {
344 	{INDEX(arenas_lrun_i)}
345 };
346 
347 static const ctl_named_node_t arenas_hchunk_i_node[] = {
348 	{NAME("size"),		CTL(arenas_hchunk_i_size)}
349 };
350 static const ctl_named_node_t super_arenas_hchunk_i_node[] = {
351 	{NAME(""),		CHILD(named, arenas_hchunk_i)}
352 };
353 
354 static const ctl_indexed_node_t arenas_hchunk_node[] = {
355 	{INDEX(arenas_hchunk_i)}
356 };
357 
358 static const ctl_named_node_t arenas_node[] = {
359 	{NAME("narenas"),	CTL(arenas_narenas)},
360 	{NAME("initialized"),	CTL(arenas_initialized)},
361 	{NAME("lg_dirty_mult"),	CTL(arenas_lg_dirty_mult)},
362 	{NAME("decay_time"),	CTL(arenas_decay_time)},
363 	{NAME("quantum"),	CTL(arenas_quantum)},
364 	{NAME("page"),		CTL(arenas_page)},
365 	{NAME("tcache_max"),	CTL(arenas_tcache_max)},
366 	{NAME("nbins"),		CTL(arenas_nbins)},
367 	{NAME("nhbins"),	CTL(arenas_nhbins)},
368 	{NAME("bin"),		CHILD(indexed, arenas_bin)},
369 	{NAME("nlruns"),	CTL(arenas_nlruns)},
370 	{NAME("lrun"),		CHILD(indexed, arenas_lrun)},
371 	{NAME("nhchunks"),	CTL(arenas_nhchunks)},
372 	{NAME("hchunk"),	CHILD(indexed, arenas_hchunk)},
373 	{NAME("extend"),	CTL(arenas_extend)}
374 };
375 
376 static const ctl_named_node_t	prof_node[] = {
377 	{NAME("thread_active_init"), CTL(prof_thread_active_init)},
378 	{NAME("active"),	CTL(prof_active)},
379 	{NAME("dump"),		CTL(prof_dump)},
380 	{NAME("gdump"),		CTL(prof_gdump)},
381 	{NAME("reset"),		CTL(prof_reset)},
382 	{NAME("interval"),	CTL(prof_interval)},
383 	{NAME("lg_sample"),	CTL(lg_prof_sample)}
384 };
385 
386 static const ctl_named_node_t stats_arenas_i_metadata_node[] = {
387 	{NAME("mapped"),	CTL(stats_arenas_i_metadata_mapped)},
388 	{NAME("allocated"),	CTL(stats_arenas_i_metadata_allocated)}
389 };
390 
391 static const ctl_named_node_t stats_arenas_i_small_node[] = {
392 	{NAME("allocated"),	CTL(stats_arenas_i_small_allocated)},
393 	{NAME("nmalloc"),	CTL(stats_arenas_i_small_nmalloc)},
394 	{NAME("ndalloc"),	CTL(stats_arenas_i_small_ndalloc)},
395 	{NAME("nrequests"),	CTL(stats_arenas_i_small_nrequests)}
396 };
397 
398 static const ctl_named_node_t stats_arenas_i_large_node[] = {
399 	{NAME("allocated"),	CTL(stats_arenas_i_large_allocated)},
400 	{NAME("nmalloc"),	CTL(stats_arenas_i_large_nmalloc)},
401 	{NAME("ndalloc"),	CTL(stats_arenas_i_large_ndalloc)},
402 	{NAME("nrequests"),	CTL(stats_arenas_i_large_nrequests)}
403 };
404 
405 static const ctl_named_node_t stats_arenas_i_huge_node[] = {
406 	{NAME("allocated"),	CTL(stats_arenas_i_huge_allocated)},
407 	{NAME("nmalloc"),	CTL(stats_arenas_i_huge_nmalloc)},
408 	{NAME("ndalloc"),	CTL(stats_arenas_i_huge_ndalloc)},
409 	{NAME("nrequests"),	CTL(stats_arenas_i_huge_nrequests)}
410 };
411 
412 static const ctl_named_node_t stats_arenas_i_bins_j_node[] = {
413 	{NAME("nmalloc"),	CTL(stats_arenas_i_bins_j_nmalloc)},
414 	{NAME("ndalloc"),	CTL(stats_arenas_i_bins_j_ndalloc)},
415 	{NAME("nrequests"),	CTL(stats_arenas_i_bins_j_nrequests)},
416 	{NAME("curregs"),	CTL(stats_arenas_i_bins_j_curregs)},
417 	{NAME("nfills"),	CTL(stats_arenas_i_bins_j_nfills)},
418 	{NAME("nflushes"),	CTL(stats_arenas_i_bins_j_nflushes)},
419 	{NAME("nruns"),		CTL(stats_arenas_i_bins_j_nruns)},
420 	{NAME("nreruns"),	CTL(stats_arenas_i_bins_j_nreruns)},
421 	{NAME("curruns"),	CTL(stats_arenas_i_bins_j_curruns)}
422 };
423 static const ctl_named_node_t super_stats_arenas_i_bins_j_node[] = {
424 	{NAME(""),		CHILD(named, stats_arenas_i_bins_j)}
425 };
426 
427 static const ctl_indexed_node_t stats_arenas_i_bins_node[] = {
428 	{INDEX(stats_arenas_i_bins_j)}
429 };
430 
431 static const ctl_named_node_t stats_arenas_i_lruns_j_node[] = {
432 	{NAME("nmalloc"),	CTL(stats_arenas_i_lruns_j_nmalloc)},
433 	{NAME("ndalloc"),	CTL(stats_arenas_i_lruns_j_ndalloc)},
434 	{NAME("nrequests"),	CTL(stats_arenas_i_lruns_j_nrequests)},
435 	{NAME("curruns"),	CTL(stats_arenas_i_lruns_j_curruns)}
436 };
437 static const ctl_named_node_t super_stats_arenas_i_lruns_j_node[] = {
438 	{NAME(""),		CHILD(named, stats_arenas_i_lruns_j)}
439 };
440 
441 static const ctl_indexed_node_t stats_arenas_i_lruns_node[] = {
442 	{INDEX(stats_arenas_i_lruns_j)}
443 };
444 
445 static const ctl_named_node_t stats_arenas_i_hchunks_j_node[] = {
446 	{NAME("nmalloc"),	CTL(stats_arenas_i_hchunks_j_nmalloc)},
447 	{NAME("ndalloc"),	CTL(stats_arenas_i_hchunks_j_ndalloc)},
448 	{NAME("nrequests"),	CTL(stats_arenas_i_hchunks_j_nrequests)},
449 	{NAME("curhchunks"),	CTL(stats_arenas_i_hchunks_j_curhchunks)}
450 };
451 static const ctl_named_node_t super_stats_arenas_i_hchunks_j_node[] = {
452 	{NAME(""),		CHILD(named, stats_arenas_i_hchunks_j)}
453 };
454 
455 static const ctl_indexed_node_t stats_arenas_i_hchunks_node[] = {
456 	{INDEX(stats_arenas_i_hchunks_j)}
457 };
458 
459 static const ctl_named_node_t stats_arenas_i_node[] = {
460 	{NAME("nthreads"),	CTL(stats_arenas_i_nthreads)},
461 	{NAME("dss"),		CTL(stats_arenas_i_dss)},
462 	{NAME("lg_dirty_mult"),	CTL(stats_arenas_i_lg_dirty_mult)},
463 	{NAME("decay_time"),	CTL(stats_arenas_i_decay_time)},
464 	{NAME("pactive"),	CTL(stats_arenas_i_pactive)},
465 	{NAME("pdirty"),	CTL(stats_arenas_i_pdirty)},
466 	{NAME("mapped"),	CTL(stats_arenas_i_mapped)},
467 	{NAME("retained"),	CTL(stats_arenas_i_retained)},
468 	{NAME("npurge"),	CTL(stats_arenas_i_npurge)},
469 	{NAME("nmadvise"),	CTL(stats_arenas_i_nmadvise)},
470 	{NAME("purged"),	CTL(stats_arenas_i_purged)},
471 	{NAME("metadata"),	CHILD(named, stats_arenas_i_metadata)},
472 	{NAME("small"),		CHILD(named, stats_arenas_i_small)},
473 	{NAME("large"),		CHILD(named, stats_arenas_i_large)},
474 	{NAME("huge"),		CHILD(named, stats_arenas_i_huge)},
475 	{NAME("bins"),		CHILD(indexed, stats_arenas_i_bins)},
476 	{NAME("lruns"),		CHILD(indexed, stats_arenas_i_lruns)},
477 	{NAME("hchunks"),	CHILD(indexed, stats_arenas_i_hchunks)}
478 };
479 static const ctl_named_node_t super_stats_arenas_i_node[] = {
480 	{NAME(""),		CHILD(named, stats_arenas_i)}
481 };
482 
483 static const ctl_indexed_node_t stats_arenas_node[] = {
484 	{INDEX(stats_arenas_i)}
485 };
486 
487 static const ctl_named_node_t stats_node[] = {
488 	{NAME("cactive"),	CTL(stats_cactive)},
489 	{NAME("allocated"),	CTL(stats_allocated)},
490 	{NAME("active"),	CTL(stats_active)},
491 	{NAME("metadata"),	CTL(stats_metadata)},
492 	{NAME("resident"),	CTL(stats_resident)},
493 	{NAME("mapped"),	CTL(stats_mapped)},
494 	{NAME("retained"),	CTL(stats_retained)},
495 	{NAME("arenas"),	CHILD(indexed, stats_arenas)}
496 };
497 
498 static const ctl_named_node_t	root_node[] = {
499 	{NAME("version"),	CTL(version)},
500 	{NAME("epoch"),		CTL(epoch)},
501 	{NAME("thread"),	CHILD(named, thread)},
502 	{NAME("config"),	CHILD(named, config)},
503 	{NAME("opt"),		CHILD(named, opt)},
504 	{NAME("tcache"),	CHILD(named, tcache)},
505 	{NAME("arena"),		CHILD(indexed, arena)},
506 	{NAME("arenas"),	CHILD(named, arenas)},
507 	{NAME("prof"),		CHILD(named, prof)},
508 	{NAME("stats"),		CHILD(named, stats)}
509 };
510 static const ctl_named_node_t super_root_node[] = {
511 	{NAME(""),		CHILD(named, root)}
512 };
513 
514 #undef NAME
515 #undef CHILD
516 #undef CTL
517 #undef INDEX
518 
519 /******************************************************************************/
520 
521 static bool
522 ctl_arena_init(ctl_arena_stats_t *astats)
523 {
524 
525 	if (astats->lstats == NULL) {
526 		astats->lstats = (malloc_large_stats_t *)a0malloc(nlclasses *
527 		    sizeof(malloc_large_stats_t));
528 		if (astats->lstats == NULL)
529 			return (true);
530 	}
531 
532 	if (astats->hstats == NULL) {
533 		astats->hstats = (malloc_huge_stats_t *)a0malloc(nhclasses *
534 		    sizeof(malloc_huge_stats_t));
535 		if (astats->hstats == NULL)
536 			return (true);
537 	}
538 
539 	return (false);
540 }
541 
/*
 * Reset an arena stats element to a pristine state so that fresh stats
 * can be merged into it.  The basic fields are reset unconditionally;
 * the counter arrays are cleared only when stats collection is compiled
 * in (they are only allocated in that configuration -- see
 * ctl_arena_init()).
 */
static void
ctl_arena_clear(ctl_arena_stats_t *astats)
{

	astats->nthreads = 0;
	/*
	 * NOTE(review): dss_prec_names[dss_prec_limit] appears to act as the
	 * "unset" sentinel name -- confirm against the dss name table.
	 */
	astats->dss = dss_prec_names[dss_prec_limit];
	astats->lg_dirty_mult = -1;
	astats->decay_time = -1;
	astats->pactive = 0;
	astats->pdirty = 0;
	if (config_stats) {
		memset(&astats->astats, 0, sizeof(arena_stats_t));
		astats->allocated_small = 0;
		astats->nmalloc_small = 0;
		astats->ndalloc_small = 0;
		astats->nrequests_small = 0;
		memset(astats->bstats, 0, NBINS * sizeof(malloc_bin_stats_t));
		memset(astats->lstats, 0, nlclasses *
		    sizeof(malloc_large_stats_t));
		memset(astats->hstats, 0, nhclasses *
		    sizeof(malloc_huge_stats_t));
	}
}
565 
/*
 * Pull current stats for a single arena into cstats.  With stats compiled
 * in, the full per-bin/large/huge arrays are merged and the small-class
 * totals are then derived from the per-bin counters; otherwise only the
 * basic (always-available) fields are read.
 */
static void
ctl_arena_stats_amerge(tsdn_t *tsdn, ctl_arena_stats_t *cstats, arena_t *arena)
{
	unsigned i;

	if (config_stats) {
		arena_stats_merge(tsdn, arena, &cstats->nthreads, &cstats->dss,
		    &cstats->lg_dirty_mult, &cstats->decay_time,
		    &cstats->pactive, &cstats->pdirty, &cstats->astats,
		    cstats->bstats, cstats->lstats, cstats->hstats);

		/* Aggregate per-bin counters into small-class totals. */
		for (i = 0; i < NBINS; i++) {
			cstats->allocated_small += cstats->bstats[i].curregs *
			    index2size(i);
			cstats->nmalloc_small += cstats->bstats[i].nmalloc;
			cstats->ndalloc_small += cstats->bstats[i].ndalloc;
			cstats->nrequests_small += cstats->bstats[i].nrequests;
		}
	} else {
		arena_basic_stats_merge(tsdn, arena, &cstats->nthreads,
		    &cstats->dss, &cstats->lg_dirty_mult, &cstats->decay_time,
		    &cstats->pactive, &cstats->pdirty);
	}
}
590 
/*
 * Accumulate one arena's snapshot (astats) into the running summary
 * (sstats).  Purely additive; the caller is responsible for clearing
 * sstats before the first merge (see ctl_refresh()).  Note that
 * dss/lg_dirty_mult/decay_time are not merged -- they are per-arena
 * settings with no meaningful sum.
 */
static void
ctl_arena_stats_smerge(ctl_arena_stats_t *sstats, ctl_arena_stats_t *astats)
{
	unsigned i;

	sstats->nthreads += astats->nthreads;
	sstats->pactive += astats->pactive;
	sstats->pdirty += astats->pdirty;

	if (config_stats) {
		sstats->astats.mapped += astats->astats.mapped;
		sstats->astats.retained += astats->astats.retained;
		sstats->astats.npurge += astats->astats.npurge;
		sstats->astats.nmadvise += astats->astats.nmadvise;
		sstats->astats.purged += astats->astats.purged;

		sstats->astats.metadata_mapped +=
		    astats->astats.metadata_mapped;
		sstats->astats.metadata_allocated +=
		    astats->astats.metadata_allocated;

		sstats->allocated_small += astats->allocated_small;
		sstats->nmalloc_small += astats->nmalloc_small;
		sstats->ndalloc_small += astats->ndalloc_small;
		sstats->nrequests_small += astats->nrequests_small;

		sstats->astats.allocated_large +=
		    astats->astats.allocated_large;
		sstats->astats.nmalloc_large += astats->astats.nmalloc_large;
		sstats->astats.ndalloc_large += astats->astats.ndalloc_large;
		sstats->astats.nrequests_large +=
		    astats->astats.nrequests_large;

		sstats->astats.allocated_huge += astats->astats.allocated_huge;
		sstats->astats.nmalloc_huge += astats->astats.nmalloc_huge;
		sstats->astats.ndalloc_huge += astats->astats.ndalloc_huge;

		/* Per-bin (small size class) counters. */
		for (i = 0; i < NBINS; i++) {
			sstats->bstats[i].nmalloc += astats->bstats[i].nmalloc;
			sstats->bstats[i].ndalloc += astats->bstats[i].ndalloc;
			sstats->bstats[i].nrequests +=
			    astats->bstats[i].nrequests;
			sstats->bstats[i].curregs += astats->bstats[i].curregs;
			/* Fill/flush counters exist only with tcache support. */
			if (config_tcache) {
				sstats->bstats[i].nfills +=
				    astats->bstats[i].nfills;
				sstats->bstats[i].nflushes +=
				    astats->bstats[i].nflushes;
			}
			sstats->bstats[i].nruns += astats->bstats[i].nruns;
			sstats->bstats[i].reruns += astats->bstats[i].reruns;
			sstats->bstats[i].curruns += astats->bstats[i].curruns;
		}

		/* Per-large-size-class counters. */
		for (i = 0; i < nlclasses; i++) {
			sstats->lstats[i].nmalloc += astats->lstats[i].nmalloc;
			sstats->lstats[i].ndalloc += astats->lstats[i].ndalloc;
			sstats->lstats[i].nrequests +=
			    astats->lstats[i].nrequests;
			sstats->lstats[i].curruns += astats->lstats[i].curruns;
		}

		/* Per-huge-size-class counters. */
		for (i = 0; i < nhclasses; i++) {
			sstats->hstats[i].nmalloc += astats->hstats[i].nmalloc;
			sstats->hstats[i].ndalloc += astats->hstats[i].ndalloc;
			sstats->hstats[i].curhchunks +=
			    astats->hstats[i].curhchunks;
		}
	}
}
661 
/*
 * Refresh the cached stats for arena i and fold them into the summary
 * element, which lives at index ctl_stats.narenas (one past the last
 * real arena; see ctl_init()).
 */
static void
ctl_arena_refresh(tsdn_t *tsdn, arena_t *arena, unsigned i)
{
	ctl_arena_stats_t *astats = &ctl_stats.arenas[i];
	ctl_arena_stats_t *sstats = &ctl_stats.arenas[ctl_stats.narenas];

	ctl_arena_clear(astats);
	ctl_arena_stats_amerge(tsdn, astats, arena);
	/* Merge into sum stats as well. */
	ctl_arena_stats_smerge(sstats, astats);
}
673 
/*
 * Extend ctl_stats to cover one newly created arena.
 *
 * Layout invariant: ctl_stats.arenas holds narenas real elements followed
 * by one summary element, so narenas+2 slots are needed after adding an
 * arena.  The old summary element is swapped to the new end of the array
 * so it stays at index ctl_stats.narenas after narenas is incremented.
 * Returns true on failure.
 */
static bool
ctl_grow(tsdn_t *tsdn)
{
	ctl_arena_stats_t *astats;

	/* Initialize new arena. */
	if (arena_init(tsdn, ctl_stats.narenas) == NULL)
		return (true);

	/* Allocate extended arena stats. */
	astats = (ctl_arena_stats_t *)a0malloc((ctl_stats.narenas + 2) *
	    sizeof(ctl_arena_stats_t));
	if (astats == NULL)
		return (true);

	/* Initialize the new astats element. */
	memcpy(astats, ctl_stats.arenas, (ctl_stats.narenas + 1) *
	    sizeof(ctl_arena_stats_t));
	memset(&astats[ctl_stats.narenas + 1], 0, sizeof(ctl_arena_stats_t));
	if (ctl_arena_init(&astats[ctl_stats.narenas + 1])) {
		a0dalloc(astats);
		return (true);
	}
	/* Swap merged stats to their new location. */
	{
		ctl_arena_stats_t tstats;
		memcpy(&tstats, &astats[ctl_stats.narenas],
		    sizeof(ctl_arena_stats_t));
		memcpy(&astats[ctl_stats.narenas],
		    &astats[ctl_stats.narenas + 1], sizeof(ctl_arena_stats_t));
		memcpy(&astats[ctl_stats.narenas + 1], &tstats,
		    sizeof(ctl_arena_stats_t));
	}
	a0dalloc(ctl_stats.arenas);
	ctl_stats.arenas = astats;
	ctl_stats.narenas++;

	return (false);
}
713 
/*
 * Re-snapshot stats for every arena and recompute the global totals.
 * The summary element at index ctl_stats.narenas is cleared first, then
 * each live arena is merged into both its own slot and the summary.
 * Visible callers (ctl_init()) invoke this with ctl_mtx held, matching
 * the "ctl_mtx protects ctl_stats.*" comment at the top of the file.
 */
static void
ctl_refresh(tsdn_t *tsdn)
{
	unsigned i;
	VARIABLE_ARRAY(arena_t *, tarenas, ctl_stats.narenas);

	/*
	 * Clear sum stats, since they will be merged into by
	 * ctl_arena_refresh().
	 */
	ctl_arena_clear(&ctl_stats.arenas[ctl_stats.narenas]);

	/*
	 * Snapshot arena pointers first; the false argument presumably asks
	 * arena_get() not to create missing arenas -- confirm its contract.
	 */
	for (i = 0; i < ctl_stats.narenas; i++)
		tarenas[i] = arena_get(tsdn, i, false);

	for (i = 0; i < ctl_stats.narenas; i++) {
		bool initialized = (tarenas[i] != NULL);

		ctl_stats.arenas[i].initialized = initialized;
		if (initialized)
			ctl_arena_refresh(tsdn, tarenas[i], i);
	}

	if (config_stats) {
		size_t base_allocated, base_resident, base_mapped;
		base_stats_get(tsdn, &base_allocated, &base_resident,
		    &base_mapped);
		/* Global totals = base allocator stats + summed arena stats. */
		ctl_stats.allocated =
		    ctl_stats.arenas[ctl_stats.narenas].allocated_small +
		    ctl_stats.arenas[ctl_stats.narenas].astats.allocated_large +
		    ctl_stats.arenas[ctl_stats.narenas].astats.allocated_huge;
		ctl_stats.active =
		    (ctl_stats.arenas[ctl_stats.narenas].pactive << LG_PAGE);
		ctl_stats.metadata = base_allocated +
		    ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped +
		    ctl_stats.arenas[ctl_stats.narenas].astats
		    .metadata_allocated;
		ctl_stats.resident = base_resident +
		    ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped +
		    ((ctl_stats.arenas[ctl_stats.narenas].pactive +
		    ctl_stats.arenas[ctl_stats.narenas].pdirty) << LG_PAGE);
		ctl_stats.mapped = base_mapped +
		    ctl_stats.arenas[ctl_stats.narenas].astats.mapped;
		ctl_stats.retained =
		    ctl_stats.arenas[ctl_stats.narenas].astats.retained;
	}

	/* Advance the epoch so readers can tell the stats were refreshed. */
	ctl_epoch++;
}
763 
/*
 * One-time initialization of the ctl subsystem: allocate and zero the
 * per-arena stats array (narenas real slots plus one trailing summary
 * slot), eagerly initialize each slot's stats buffers, and take the
 * initial stats snapshot.  Idempotent and serialized by ctl_mtx.
 * Returns true on allocation failure.
 */
static bool
ctl_init(tsdn_t *tsdn)
{
	bool ret;

	malloc_mutex_lock(tsdn, &ctl_mtx);
	if (!ctl_initialized) {
		/*
		 * Allocate space for one extra arena stats element, which
		 * contains summed stats across all arenas.
		 */
		ctl_stats.narenas = narenas_total_get();
		ctl_stats.arenas = (ctl_arena_stats_t *)a0malloc(
		    (ctl_stats.narenas + 1) * sizeof(ctl_arena_stats_t));
		if (ctl_stats.arenas == NULL) {
			ret = true;
			goto label_return;
		}
		memset(ctl_stats.arenas, 0, (ctl_stats.narenas + 1) *
		    sizeof(ctl_arena_stats_t));

		/*
		 * Initialize all stats structures, regardless of whether they
		 * ever get used.  Lazy initialization would allow errors to
		 * cause inconsistent state to be viewable by the application.
		 */
		if (config_stats) {
			unsigned i;
			for (i = 0; i <= ctl_stats.narenas; i++) {
				if (ctl_arena_init(&ctl_stats.arenas[i])) {
					/* Unwind slots 0..i-1, then fail. */
					unsigned j;
					for (j = 0; j < i; j++) {
						a0dalloc(
						    ctl_stats.arenas[j].lstats);
						a0dalloc(
						    ctl_stats.arenas[j].hstats);
					}
					a0dalloc(ctl_stats.arenas);
					ctl_stats.arenas = NULL;
					ret = true;
					goto label_return;
				}
			}
		}
		/* The trailing summary pseudo-arena is always "initialized". */
		ctl_stats.arenas[ctl_stats.narenas].initialized = true;

		ctl_epoch = 0;
		ctl_refresh(tsdn);
		ctl_initialized = true;
	}

	ret = false;
label_return:
	malloc_mutex_unlock(tsdn, &ctl_mtx);
	return (ret);
}
820 
/*
 * Translate a dotted mallctl name (e.g. "arenas.bin.0.size") into a MIB.
 *
 * On entry *depthp is the capacity of mibp (and nodesp, if non-NULL); on
 * success it is set to the number of components actually consumed.  For
 * each component, mibp[i] receives either the child's position within
 * its parent (named children) or the parsed numeric index (indexed
 * children), and nodesp[i] (when requested) receives the matched node.
 * Returns 0 on success, ENOENT if the name does not name a complete
 * path to a terminal node.
 */
static int
ctl_lookup(tsdn_t *tsdn, const char *name, ctl_node_t const **nodesp,
    size_t *mibp, size_t *depthp)
{
	int ret;
	const char *elm, *tdot, *dot;
	size_t elen, i, j;
	const ctl_named_node_t *node;

	elm = name;
	/* Equivalent to strchrnul(). */
	dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot : strchr(elm, '\0');
	elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	if (elen == 0) {
		ret = ENOENT;
		goto label_return;
	}
	node = super_root_node;
	for (i = 0; i < *depthp; i++) {
		assert(node);
		assert(node->nchildren > 0);
		if (ctl_named_node(node->children) != NULL) {
			const ctl_named_node_t *pnode = node;

			/* Children are named. */
			for (j = 0; j < node->nchildren; j++) {
				const ctl_named_node_t *child =
				    ctl_named_children(node, j);
				if (strlen(child->name) == elen &&
				    strncmp(elm, child->name, elen) == 0) {
					node = child;
					if (nodesp != NULL)
						nodesp[i] =
						    (const ctl_node_t *)node;
					mibp[i] = j;
					break;
				}
			}
			/* node unchanged means no child name matched. */
			if (node == pnode) {
				ret = ENOENT;
				goto label_return;
			}
		} else {
			uintmax_t index;
			const ctl_indexed_node_t *inode;

			/* Children are indexed. */
			index = malloc_strtoumax(elm, NULL, 10);
			if (index == UINTMAX_MAX || index > SIZE_T_MAX) {
				ret = ENOENT;
				goto label_return;
			}

			inode = ctl_indexed_node(node->children);
			node = inode->index(tsdn, mibp, *depthp, (size_t)index);
			if (node == NULL) {
				ret = ENOENT;
				goto label_return;
			}

			if (nodesp != NULL)
				nodesp[i] = (const ctl_node_t *)node;
			mibp[i] = (size_t)index;
		}

		if (node->ctl != NULL) {
			/* Terminal node. */
			if (*dot != '\0') {
				/*
				 * The name contains more elements than are
				 * in this path through the tree.
				 */
				ret = ENOENT;
				goto label_return;
			}
			/* Complete lookup successful. */
			*depthp = i + 1;
			break;
		}

		/* Update elm. */
		if (*dot == '\0') {
			/* No more elements. */
			ret = ENOENT;
			goto label_return;
		}
		elm = &dot[1];
		dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot :
		    strchr(elm, '\0');
		elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	}

	ret = 0;
label_return:
	return (ret);
}
917 
918 int
919 ctl_byname(tsd_t *tsd, const char *name, void *oldp, size_t *oldlenp,
920     void *newp, size_t newlen)
921 {
922 	int ret;
923 	size_t depth;
924 	ctl_node_t const *nodes[CTL_MAX_DEPTH];
925 	size_t mib[CTL_MAX_DEPTH];
926 	const ctl_named_node_t *node;
927 
928 	if (!ctl_initialized && ctl_init(tsd_tsdn(tsd))) {
929 		ret = EAGAIN;
930 		goto label_return;
931 	}
932 
933 	depth = CTL_MAX_DEPTH;
934 	ret = ctl_lookup(tsd_tsdn(tsd), name, nodes, mib, &depth);
935 	if (ret != 0)
936 		goto label_return;
937 
938 	node = ctl_named_node(nodes[depth-1]);
939 	if (node != NULL && node->ctl)
940 		ret = node->ctl(tsd, mib, depth, oldp, oldlenp, newp, newlen);
941 	else {
942 		/* The name refers to a partial path through the ctl tree. */
943 		ret = ENOENT;
944 	}
945 
946 label_return:
947 	return(ret);
948 }
949 
950 int
951 ctl_nametomib(tsdn_t *tsdn, const char *name, size_t *mibp, size_t *miblenp)
952 {
953 	int ret;
954 
955 	if (!ctl_initialized && ctl_init(tsdn)) {
956 		ret = EAGAIN;
957 		goto label_return;
958 	}
959 
960 	ret = ctl_lookup(tsdn, name, NULL, mibp, miblenp);
961 label_return:
962 	return(ret);
963 }
964 
965 int
966 ctl_bymib(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
967     size_t *oldlenp, void *newp, size_t newlen)
968 {
969 	int ret;
970 	const ctl_named_node_t *node;
971 	size_t i;
972 
973 	if (!ctl_initialized && ctl_init(tsd_tsdn(tsd))) {
974 		ret = EAGAIN;
975 		goto label_return;
976 	}
977 
978 	/* Iterate down the tree. */
979 	node = super_root_node;
980 	for (i = 0; i < miblen; i++) {
981 		assert(node);
982 		assert(node->nchildren > 0);
983 		if (ctl_named_node(node->children) != NULL) {
984 			/* Children are named. */
985 			if (node->nchildren <= (unsigned)mib[i]) {
986 				ret = ENOENT;
987 				goto label_return;
988 			}
989 			node = ctl_named_children(node, mib[i]);
990 		} else {
991 			const ctl_indexed_node_t *inode;
992 
993 			/* Indexed element. */
994 			inode = ctl_indexed_node(node->children);
995 			node = inode->index(tsd_tsdn(tsd), mib, miblen, mib[i]);
996 			if (node == NULL) {
997 				ret = ENOENT;
998 				goto label_return;
999 			}
1000 		}
1001 	}
1002 
1003 	/* Call the ctl function. */
1004 	if (node && node->ctl)
1005 		ret = node->ctl(tsd, mib, miblen, oldp, oldlenp, newp, newlen);
1006 	else {
1007 		/* Partial MIB. */
1008 		ret = ENOENT;
1009 	}
1010 
1011 label_return:
1012 	return(ret);
1013 }
1014 
1015 bool
1016 ctl_boot(void)
1017 {
1018 
1019 	if (malloc_mutex_init(&ctl_mtx, "ctl", WITNESS_RANK_CTL))
1020 		return (true);
1021 
1022 	ctl_initialized = false;
1023 
1024 	return (false);
1025 }
1026 
/* Acquire ctl_mtx before fork() so the child inherits a consistent state. */
void
ctl_prefork(tsdn_t *tsdn)
{

	malloc_mutex_prefork(tsdn, &ctl_mtx);
}
1033 
/* Release ctl_mtx in the parent after fork(). */
void
ctl_postfork_parent(tsdn_t *tsdn)
{

	malloc_mutex_postfork_parent(tsdn, &ctl_mtx);
}
1040 
/* Reinitialize ctl_mtx in the child after fork(). */
void
ctl_postfork_child(tsdn_t *tsdn)
{

	malloc_mutex_postfork_child(tsdn, &ctl_mtx);
}
1047 
1048 /******************************************************************************/
1049 /* *_ctl() functions. */
1050 
/*
 * Fail with EPERM if the caller supplied new data to a read-only node.
 * Expects a local "ret" and a label_return target in the caller.
 */
#define	READONLY()	do {						\
	if (newp != NULL || newlen != 0) {				\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)
1057 
/* Fail with EPERM if the caller requested old data from a write-only node. */
#define	WRITEONLY()	do {						\
	if (oldp != NULL || oldlenp != NULL) {				\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)
1064 
/* Fail with EPERM if the caller attempts to both read and write in one call. */
#define	READ_XOR_WRITE()	do {					\
	if ((oldp != NULL && oldlenp != NULL) && (newp != NULL ||	\
	    newlen != 0)) {						\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)
1072 
/*
 * Copy value v (of type t) out through oldp.  On a size mismatch, copy
 * as many bytes as fit and fail with EINVAL; on an exact match, store
 * the value directly.  Reads are skipped when oldp/oldlenp are NULL.
 */
#define	READ(v, t)	do {						\
	if (oldp != NULL && oldlenp != NULL) {				\
		if (*oldlenp != sizeof(t)) {				\
			size_t	copylen = (sizeof(t) <= *oldlenp)	\
			    ? sizeof(t) : *oldlenp;			\
			memcpy(oldp, (void *)&(v), copylen);		\
			ret = EINVAL;					\
			goto label_return;				\
		}							\
		*(t *)oldp = (v);					\
	}								\
} while (0)
1085 
/*
 * Copy the caller's new value (type t) from newp into v; fail with
 * EINVAL if newlen does not match.  No-op when newp is NULL.
 */
#define	WRITE(v, t)	do {						\
	if (newp != NULL) {						\
		if (newlen != sizeof(t)) {				\
			ret = EINVAL;					\
			goto label_return;				\
		}							\
		(v) = *(t *)newp;					\
	}								\
} while (0)
1095 
1096 /*
1097  * There's a lot of code duplication in the following macros due to limitations
1098  * in how nested cpp macros are expanded.
1099  */
/*
 * Generate a read-only handler n##_ctl that returns value v of type t,
 * gated on compile-time condition c (ENOENT when false); l selects
 * whether ctl_mtx is held around the read.
 */
#define	CTL_RO_CLGEN(c, l, n, v, t)					\
static int								\
n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen)				\
{									\
	int ret;							\
	t oldval;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	if (l)								\
		malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);		\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	if (l)								\
		malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);		\
	return (ret);							\
}
1122 
/* Like CTL_RO_CLGEN, but ctl_mtx is always held around the read. */
#define	CTL_RO_CGEN(c, n, v, t)						\
static int								\
n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen)				\
{									\
	int ret;							\
	t oldval;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);			\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);			\
	return (ret);							\
}
1143 
/* Unconditional read-only handler; ctl_mtx is held around the read. */
#define	CTL_RO_GEN(n, v, t)						\
static int								\
n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen)				\
{									\
	int ret;							\
	t oldval;							\
									\
	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);			\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);			\
	return (ret);							\
}
1162 
1163 /*
1164  * ctl_mtx is not acquired, under the assumption that no pertinent data will
1165  * mutate during the call.
1166  */
/* Conditional (on c) read-only handler; no lock is taken (see above). */
#define	CTL_RO_NL_CGEN(c, n, v, t)					\
static int								\
n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen)				\
{									\
	int ret;							\
	t oldval;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
1185 
/* Unconditional read-only handler; no lock is taken (see above). */
#define	CTL_RO_NL_GEN(n, v, t)						\
static int								\
n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen)				\
{									\
	int ret;							\
	t oldval;							\
									\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
1202 
/*
 * Conditional read-only handler whose value comes from accessor m
 * applied to the calling thread's tsd; no lock is taken.
 */
#define	CTL_TSD_RO_NL_CGEN(c, n, m, t)					\
static int								\
n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen)				\
{									\
	int ret;							\
	t oldval;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	READONLY();							\
	oldval = (m(tsd));						\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
1221 
/* Read-only handler exposing compile-time configuration constant n as t. */
#define	CTL_RO_CONFIG_GEN(n, t)						\
static int								\
n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen)				\
{									\
	int ret;							\
	t oldval;							\
									\
	READONLY();							\
	oldval = n;							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
1238 
1239 /******************************************************************************/
1240 
/* "version": the jemalloc version string. */
CTL_RO_NL_GEN(version, JEMALLOC_VERSION, const char *)
1242 
/*
 * "epoch": writing any uint64_t (the value itself is ignored) refreshes
 * the cached ctl statistics; reading returns the current ctl_epoch.
 */
static int
epoch_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	UNUSED uint64_t newval;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	/* Validate newlen even though the written value is unused. */
	WRITE(newval, uint64_t);
	if (newp != NULL)
		ctl_refresh(tsd_tsdn(tsd));
	READ(ctl_epoch, uint64_t);

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1261 
1262 /******************************************************************************/
1263 
/* Read-only reflection of jemalloc's compile-time configuration flags. */
CTL_RO_CONFIG_GEN(config_cache_oblivious, bool)
CTL_RO_CONFIG_GEN(config_debug, bool)
CTL_RO_CONFIG_GEN(config_fill, bool)
CTL_RO_CONFIG_GEN(config_lazy_lock, bool)
CTL_RO_CONFIG_GEN(config_malloc_conf, const char *)
CTL_RO_CONFIG_GEN(config_munmap, bool)
CTL_RO_CONFIG_GEN(config_prof, bool)
CTL_RO_CONFIG_GEN(config_prof_libgcc, bool)
CTL_RO_CONFIG_GEN(config_prof_libunwind, bool)
CTL_RO_CONFIG_GEN(config_stats, bool)
CTL_RO_CONFIG_GEN(config_tcache, bool)
CTL_RO_CONFIG_GEN(config_thp, bool)
CTL_RO_CONFIG_GEN(config_tls, bool)
CTL_RO_CONFIG_GEN(config_utrace, bool)
CTL_RO_CONFIG_GEN(config_valgrind, bool)
CTL_RO_CONFIG_GEN(config_xmalloc, bool)
1280 
1281 /******************************************************************************/
1282 
/* Read-only views of the opt_* run-time option values. */
CTL_RO_NL_GEN(opt_abort, opt_abort, bool)
CTL_RO_NL_GEN(opt_dss, opt_dss, const char *)
CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
CTL_RO_NL_GEN(opt_narenas, opt_narenas, unsigned)
CTL_RO_NL_GEN(opt_purge, purge_mode_names[opt_purge], const char *)
CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
CTL_RO_NL_GEN(opt_decay_time, opt_decay_time, ssize_t)
CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool)
CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, const char *)
CTL_RO_NL_CGEN(config_fill, opt_quarantine, opt_quarantine, size_t)
CTL_RO_NL_CGEN(config_fill, opt_redzone, opt_redzone, bool)
CTL_RO_NL_CGEN(config_fill, opt_zero, opt_zero, bool)
CTL_RO_NL_CGEN(config_utrace, opt_utrace, opt_utrace, bool)
CTL_RO_NL_CGEN(config_xmalloc, opt_xmalloc, opt_xmalloc, bool)
CTL_RO_NL_CGEN(config_tcache, opt_tcache, opt_tcache, bool)
CTL_RO_NL_CGEN(config_tcache, opt_lg_tcache_max, opt_lg_tcache_max, ssize_t)
CTL_RO_NL_CGEN(config_thp, opt_thp, opt_thp, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof, opt_prof, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_prefix, opt_prof_prefix, const char *)
CTL_RO_NL_CGEN(config_prof, opt_prof_active, opt_prof_active, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_thread_active_init,
    opt_prof_thread_active_init, bool)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_sample, opt_lg_prof_sample, size_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_accum, opt_prof_accum, bool)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_interval, opt_lg_prof_interval, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_gdump, opt_prof_gdump, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_final, opt_prof_final, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_leak, opt_prof_leak, bool)
1311 
1312 /******************************************************************************/
1313 
/*
 * "thread.arena": read the calling thread's current arena index and/or
 * migrate the thread (and its tcache, if any) to another arena.
 */
static int
thread_arena_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	arena_t *oldarena;
	unsigned newind, oldind;

	oldarena = arena_choose(tsd, NULL);
	if (oldarena == NULL)
		return (EAGAIN);

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	newind = oldind = oldarena->ind;
	WRITE(newind, unsigned);
	READ(oldind, unsigned);
	if (newind != oldind) {
		arena_t *newarena;

		if (newind >= ctl_stats.narenas) {
			/* New arena index is out of range. */
			ret = EFAULT;
			goto label_return;
		}

		/* Initialize arena if necessary. */
		newarena = arena_get(tsd_tsdn(tsd), newind, true);
		if (newarena == NULL) {
			ret = EAGAIN;
			goto label_return;
		}
		/* Set new arena/tcache associations. */
		arena_migrate(tsd, oldind, newind);
		if (config_tcache) {
			tcache_t *tcache = tsd_tcache_get(tsd);
			if (tcache != NULL) {
				tcache_arena_reassociate(tsd_tsdn(tsd), tcache,
				    oldarena, newarena);
			}
		}
	}

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1361 
/* Per-thread allocation counters, and pointers to them (stats builds only). */
CTL_TSD_RO_NL_CGEN(config_stats, thread_allocated, tsd_thread_allocated_get,
    uint64_t)
CTL_TSD_RO_NL_CGEN(config_stats, thread_allocatedp, tsd_thread_allocatedp_get,
    uint64_t *)
CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocated, tsd_thread_deallocated_get,
    uint64_t)
CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocatedp,
    tsd_thread_deallocatedp_get, uint64_t *)
1370 
1371 static int
1372 thread_tcache_enabled_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
1373     void *oldp, size_t *oldlenp, void *newp, size_t newlen)
1374 {
1375 	int ret;
1376 	bool oldval;
1377 
1378 	if (!config_tcache)
1379 		return (ENOENT);
1380 
1381 	oldval = tcache_enabled_get();
1382 	if (newp != NULL) {
1383 		if (newlen != sizeof(bool)) {
1384 			ret = EINVAL;
1385 			goto label_return;
1386 		}
1387 		tcache_enabled_set(*(bool *)newp);
1388 	}
1389 	READ(oldval, bool);
1390 
1391 	ret = 0;
1392 label_return:
1393 	return (ret);
1394 }
1395 
/* "thread.tcache.flush": flush the calling thread's tcache. */
static int
thread_tcache_flush_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
    void *oldp, size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	if (!config_tcache)
		return (ENOENT);

	/* This node accepts neither old nor new data. */
	READONLY();
	WRITEONLY();

	tcache_flush();

	ret = 0;
label_return:
	return (ret);
}
1414 
/*
 * "thread.prof.name": read or set the calling thread's profiling name.
 * Reading and writing are mutually exclusive within a single call.
 */
static int
thread_prof_name_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	if (!config_prof)
		return (ENOENT);

	READ_XOR_WRITE();

	if (newp != NULL) {
		if (newlen != sizeof(const char *)) {
			ret = EINVAL;
			goto label_return;
		}

		/* The setter's error code is propagated directly. */
		if ((ret = prof_thread_name_set(tsd, *(const char **)newp)) !=
		    0)
			goto label_return;
	} else {
		const char *oldname = prof_thread_name_get(tsd);
		READ(oldname, const char *);
	}

	ret = 0;
label_return:
	return (ret);
}
1444 
1445 static int
1446 thread_prof_active_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
1447     size_t *oldlenp, void *newp, size_t newlen)
1448 {
1449 	int ret;
1450 	bool oldval;
1451 
1452 	if (!config_prof)
1453 		return (ENOENT);
1454 
1455 	oldval = prof_thread_active_get(tsd);
1456 	if (newp != NULL) {
1457 		if (newlen != sizeof(bool)) {
1458 			ret = EINVAL;
1459 			goto label_return;
1460 		}
1461 		if (prof_thread_active_set(tsd, *(bool *)newp)) {
1462 			ret = EAGAIN;
1463 			goto label_return;
1464 		}
1465 	}
1466 	READ(oldval, bool);
1467 
1468 	ret = 0;
1469 label_return:
1470 	return (ret);
1471 }
1472 
1473 /******************************************************************************/
1474 
1475 static int
1476 tcache_create_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
1477     size_t *oldlenp, void *newp, size_t newlen)
1478 {
1479 	int ret;
1480 	unsigned tcache_ind;
1481 
1482 	if (!config_tcache)
1483 		return (ENOENT);
1484 
1485 	READONLY();
1486 	if (tcaches_create(tsd, &tcache_ind)) {
1487 		ret = EFAULT;
1488 		goto label_return;
1489 	}
1490 	READ(tcache_ind, unsigned);
1491 
1492 	ret = 0;
1493 label_return:
1494 	return ret;
1495 }
1496 
/* "tcache.flush": flush the explicit tcache whose index is given via newp. */
static int
tcache_flush_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned tcache_ind;

	if (!config_tcache)
		return (ENOENT);

	WRITEONLY();
	/* UINT_MAX doubles as the "no index supplied" sentinel. */
	tcache_ind = UINT_MAX;
	WRITE(tcache_ind, unsigned);
	if (tcache_ind == UINT_MAX) {
		ret = EFAULT;
		goto label_return;
	}
	tcaches_flush(tsd, tcache_ind);

	ret = 0;
label_return:
	return (ret);
}
1520 
1521 static int
1522 tcache_destroy_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
1523     size_t *oldlenp, void *newp, size_t newlen)
1524 {
1525 	int ret;
1526 	unsigned tcache_ind;
1527 
1528 	if (!config_tcache)
1529 		return (ENOENT);
1530 
1531 	WRITEONLY();
1532 	tcache_ind = UINT_MAX;
1533 	WRITE(tcache_ind, unsigned);
1534 	if (tcache_ind == UINT_MAX) {
1535 		ret = EFAULT;
1536 		goto label_return;
1537 	}
1538 	tcaches_destroy(tsd, tcache_ind);
1539 
1540 	ret = 0;
1541 label_return:
1542 	return (ret);
1543 }
1544 
1545 /******************************************************************************/
1546 
/*
 * Purge dirty pages for arena arena_ind, or for every arena when
 * arena_ind equals ctl_stats.narenas (the "all arenas" pseudo-index).
 * "all" selects a full purge rather than decay-driven purging.
 * ctl_mtx is held only long enough to snapshot the arena pointers.
 */
static void
arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all)
{

	malloc_mutex_lock(tsdn, &ctl_mtx);
	{
		unsigned narenas = ctl_stats.narenas;

		if (arena_ind == narenas) {
			unsigned i;
			VARIABLE_ARRAY(arena_t *, tarenas, narenas);

			for (i = 0; i < narenas; i++)
				tarenas[i] = arena_get(tsdn, i, false);

			/*
			 * No further need to hold ctl_mtx, since narenas and
			 * tarenas contain everything needed below.
			 */
			malloc_mutex_unlock(tsdn, &ctl_mtx);

			for (i = 0; i < narenas; i++) {
				if (tarenas[i] != NULL)
					arena_purge(tsdn, tarenas[i], all);
			}
		} else {
			arena_t *tarena;

			assert(arena_ind < narenas);

			tarena = arena_get(tsdn, arena_ind, false);

			/* No further need to hold ctl_mtx. */
			malloc_mutex_unlock(tsdn, &ctl_mtx);

			if (tarena != NULL)
				arena_purge(tsdn, tarena, all);
		}
	}
}
1587 
/* "arena.<i>.purge": force a full purge of arena mib[1] (no old/new data). */
static int
arena_i_purge_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	READONLY();
	WRITEONLY();
	arena_i_purge(tsd_tsdn(tsd), (unsigned)mib[1], true);

	ret = 0;
label_return:
	return (ret);
}
1602 
/* "arena.<i>.decay": trigger decay-based purging of arena mib[1]. */
static int
arena_i_decay_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	READONLY();
	WRITEONLY();
	arena_i_purge(tsd_tsdn(tsd), (unsigned)mib[1], false);

	ret = 0;
label_return:
	return (ret);
}
1617 
/*
 * "arena.<i>.reset": discard all allocations owned by arena mib[1].
 * Refused (EFAULT) when Valgrind or quarantine is active, and asserted
 * to be used only on manually created arenas (index >= opt_narenas).
 */
static int
arena_i_reset_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned arena_ind;
	arena_t *arena;

	READONLY();
	WRITEONLY();

	if ((config_valgrind && unlikely(in_valgrind)) || (config_fill &&
	    unlikely(opt_quarantine))) {
		ret = EFAULT;
		goto label_return;
	}

	arena_ind = (unsigned)mib[1];
	if (config_debug) {
		malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
		assert(arena_ind < ctl_stats.narenas);
		malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	}
	assert(arena_ind >= opt_narenas);

	arena = arena_get(tsd_tsdn(tsd), arena_ind, false);

	arena_reset(tsd, arena);

	ret = 0;
label_return:
	return (ret);
}
1651 
/*
 * "arena.<i>.dss": read and/or set the dss precedence for arena mib[1],
 * or the chunk-level default when the index equals narenas.  Values are
 * exchanged as precedence-name strings.
 */
static int
arena_i_dss_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	const char *dss = NULL;
	unsigned arena_ind = (unsigned)mib[1];
	dss_prec_t dss_prec_old = dss_prec_limit;
	dss_prec_t dss_prec = dss_prec_limit;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	WRITE(dss, const char *);
	/* Translate the new precedence name, if any, to its enum value. */
	if (dss != NULL) {
		int i;
		bool match = false;

		for (i = 0; i < dss_prec_limit; i++) {
			if (strcmp(dss_prec_names[i], dss) == 0) {
				dss_prec = i;
				match = true;
				break;
			}
		}

		if (!match) {
			ret = EINVAL;
			goto label_return;
		}
	}

	/* Index == narenas addresses the chunk-level default instead. */
	if (arena_ind < ctl_stats.narenas) {
		arena_t *arena = arena_get(tsd_tsdn(tsd), arena_ind, false);
		if (arena == NULL || (dss_prec != dss_prec_limit &&
		    arena_dss_prec_set(tsd_tsdn(tsd), arena, dss_prec))) {
			ret = EFAULT;
			goto label_return;
		}
		dss_prec_old = arena_dss_prec_get(tsd_tsdn(tsd), arena);
	} else {
		if (dss_prec != dss_prec_limit &&
		    chunk_dss_prec_set(dss_prec)) {
			ret = EFAULT;
			goto label_return;
		}
		dss_prec_old = chunk_dss_prec_get();
	}

	dss = dss_prec_names[dss_prec_old];
	READ(dss, const char *);

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1707 
1708 static int
1709 arena_i_lg_dirty_mult_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
1710     void *oldp, size_t *oldlenp, void *newp, size_t newlen)
1711 {
1712 	int ret;
1713 	unsigned arena_ind = (unsigned)mib[1];
1714 	arena_t *arena;
1715 
1716 	arena = arena_get(tsd_tsdn(tsd), arena_ind, false);
1717 	if (arena == NULL) {
1718 		ret = EFAULT;
1719 		goto label_return;
1720 	}
1721 
1722 	if (oldp != NULL && oldlenp != NULL) {
1723 		size_t oldval = arena_lg_dirty_mult_get(tsd_tsdn(tsd), arena);
1724 		READ(oldval, ssize_t);
1725 	}
1726 	if (newp != NULL) {
1727 		if (newlen != sizeof(ssize_t)) {
1728 			ret = EINVAL;
1729 			goto label_return;
1730 		}
1731 		if (arena_lg_dirty_mult_set(tsd_tsdn(tsd), arena,
1732 		    *(ssize_t *)newp)) {
1733 			ret = EFAULT;
1734 			goto label_return;
1735 		}
1736 	}
1737 
1738 	ret = 0;
1739 label_return:
1740 	return (ret);
1741 }
1742 
1743 static int
1744 arena_i_decay_time_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
1745     size_t *oldlenp, void *newp, size_t newlen)
1746 {
1747 	int ret;
1748 	unsigned arena_ind = (unsigned)mib[1];
1749 	arena_t *arena;
1750 
1751 	arena = arena_get(tsd_tsdn(tsd), arena_ind, false);
1752 	if (arena == NULL) {
1753 		ret = EFAULT;
1754 		goto label_return;
1755 	}
1756 
1757 	if (oldp != NULL && oldlenp != NULL) {
1758 		size_t oldval = arena_decay_time_get(tsd_tsdn(tsd), arena);
1759 		READ(oldval, ssize_t);
1760 	}
1761 	if (newp != NULL) {
1762 		if (newlen != sizeof(ssize_t)) {
1763 			ret = EINVAL;
1764 			goto label_return;
1765 		}
1766 		if (arena_decay_time_set(tsd_tsdn(tsd), arena,
1767 		    *(ssize_t *)newp)) {
1768 			ret = EFAULT;
1769 			goto label_return;
1770 		}
1771 	}
1772 
1773 	ret = 0;
1774 label_return:
1775 	return (ret);
1776 }
1777 
/*
 * "arena.<i>.chunk_hooks": read and/or replace arena mib[1]'s chunk
 * management hooks.  When both are requested, the old hooks returned
 * are those in effect before the replacement.
 */
static int
arena_i_chunk_hooks_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
    void *oldp, size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned arena_ind = (unsigned)mib[1];
	arena_t *arena;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	if (arena_ind < narenas_total_get() && (arena =
	    arena_get(tsd_tsdn(tsd), arena_ind, false)) != NULL) {
		if (newp != NULL) {
			chunk_hooks_t old_chunk_hooks, new_chunk_hooks;
			WRITE(new_chunk_hooks, chunk_hooks_t);
			old_chunk_hooks = chunk_hooks_set(tsd_tsdn(tsd), arena,
			    &new_chunk_hooks);
			READ(old_chunk_hooks, chunk_hooks_t);
		} else {
			chunk_hooks_t old_chunk_hooks =
			    chunk_hooks_get(tsd_tsdn(tsd), arena);
			READ(old_chunk_hooks, chunk_hooks_t);
		}
	} else {
		ret = EFAULT;
		goto label_return;
	}
	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1809 
1810 static const ctl_named_node_t *
1811 arena_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
1812 {
1813 	const ctl_named_node_t *ret;
1814 
1815 	malloc_mutex_lock(tsdn, &ctl_mtx);
1816 	if (i > ctl_stats.narenas) {
1817 		ret = NULL;
1818 		goto label_return;
1819 	}
1820 
1821 	ret = super_arena_i_node;
1822 label_return:
1823 	malloc_mutex_unlock(tsdn, &ctl_mtx);
1824 	return (ret);
1825 }
1826 
1827 /******************************************************************************/
1828 
/* "arenas.narenas": total number of arenas (read-only, under ctl_mtx). */
static int
arenas_narenas_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned narenas;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	READONLY();
	/*
	 * NOTE(review): oldlenp is dereferenced without a NULL check here;
	 * callers appear to always supply it -- verify at the mallctl
	 * entry points.
	 */
	if (*oldlenp != sizeof(unsigned)) {
		ret = EINVAL;
		goto label_return;
	}
	narenas = ctl_stats.narenas;
	READ(narenas, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1850 
/*
 * "arenas.initialized": copy out one bool per arena indicating whether
 * it has been initialized.  On a short buffer, as many entries as fit
 * are still written and EINVAL is returned.
 */
static int
arenas_initialized_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned nread, i;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	READONLY();
	/*
	 * NOTE(review): oldlenp and oldp are used without NULL checks;
	 * presumably the mallctl entry points guarantee both -- verify.
	 */
	if (*oldlenp != ctl_stats.narenas * sizeof(bool)) {
		ret = EINVAL;
		nread = (*oldlenp < ctl_stats.narenas * sizeof(bool))
		    ? (unsigned)(*oldlenp / sizeof(bool)) : ctl_stats.narenas;
	} else {
		ret = 0;
		nread = ctl_stats.narenas;
	}

	for (i = 0; i < nread; i++)
		((bool *)oldp)[i] = ctl_stats.arenas[i].initialized;

label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1876 
1877 static int
1878 arenas_lg_dirty_mult_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
1879     void *oldp, size_t *oldlenp, void *newp, size_t newlen)
1880 {
1881 	int ret;
1882 
1883 	if (oldp != NULL && oldlenp != NULL) {
1884 		size_t oldval = arena_lg_dirty_mult_default_get();
1885 		READ(oldval, ssize_t);
1886 	}
1887 	if (newp != NULL) {
1888 		if (newlen != sizeof(ssize_t)) {
1889 			ret = EINVAL;
1890 			goto label_return;
1891 		}
1892 		if (arena_lg_dirty_mult_default_set(*(ssize_t *)newp)) {
1893 			ret = EFAULT;
1894 			goto label_return;
1895 		}
1896 	}
1897 
1898 	ret = 0;
1899 label_return:
1900 	return (ret);
1901 }
1902 
1903 static int
1904 arenas_decay_time_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
1905     size_t *oldlenp, void *newp, size_t newlen)
1906 {
1907 	int ret;
1908 
1909 	if (oldp != NULL && oldlenp != NULL) {
1910 		size_t oldval = arena_decay_time_default_get();
1911 		READ(oldval, ssize_t);
1912 	}
1913 	if (newp != NULL) {
1914 		if (newlen != sizeof(ssize_t)) {
1915 			ret = EINVAL;
1916 			goto label_return;
1917 		}
1918 		if (arena_decay_time_default_set(*(ssize_t *)newp)) {
1919 			ret = EFAULT;
1920 			goto label_return;
1921 		}
1922 	}
1923 
1924 	ret = 0;
1925 label_return:
1926 	return (ret);
1927 }
1928 
/* Global size-class and bin geometry for the "arenas" namespace. */
CTL_RO_NL_GEN(arenas_quantum, QUANTUM, size_t)
CTL_RO_NL_GEN(arenas_page, PAGE, size_t)
CTL_RO_NL_CGEN(config_tcache, arenas_tcache_max, tcache_maxclass, size_t)
CTL_RO_NL_GEN(arenas_nbins, NBINS, unsigned)
CTL_RO_NL_CGEN(config_tcache, arenas_nhbins, nhbins, unsigned)
CTL_RO_NL_GEN(arenas_bin_i_size, arena_bin_info[mib[2]].reg_size, size_t)
CTL_RO_NL_GEN(arenas_bin_i_nregs, arena_bin_info[mib[2]].nregs, uint32_t)
CTL_RO_NL_GEN(arenas_bin_i_run_size, arena_bin_info[mib[2]].run_size, size_t)
1937 static const ctl_named_node_t *
1938 arenas_bin_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
1939 {
1940 
1941 	if (i > NBINS)
1942 		return (NULL);
1943 	return (super_arenas_bin_i_node);
1944 }
1945 
/* Large run size classes: count and per-class size. */
CTL_RO_NL_GEN(arenas_nlruns, nlclasses, unsigned)
CTL_RO_NL_GEN(arenas_lrun_i_size, index2size(NBINS+(szind_t)mib[2]), size_t)
1948 static const ctl_named_node_t *
1949 arenas_lrun_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
1950 {
1951 
1952 	if (i > nlclasses)
1953 		return (NULL);
1954 	return (super_arenas_lrun_i_node);
1955 }
1956 
/* Huge chunk size classes: count and per-class size. */
CTL_RO_NL_GEN(arenas_nhchunks, nhclasses, unsigned)
CTL_RO_NL_GEN(arenas_hchunk_i_size, index2size(NBINS+nlclasses+(szind_t)mib[2]),
    size_t)
1960 static const ctl_named_node_t *
1961 arenas_hchunk_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
1962 {
1963 
1964 	if (i > nhclasses)
1965 		return (NULL);
1966 	return (super_arenas_hchunk_i_node);
1967 }
1968 
/*
 * "arenas.extend": create a new arena and return its index via oldp.
 * Read-only in the mallctl sense; EAGAIN if growth fails.
 */
static int
arenas_extend_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned narenas;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	READONLY();
	if (ctl_grow(tsd_tsdn(tsd))) {
		ret = EAGAIN;
		goto label_return;
	}
	/* ctl_grow() incremented narenas; the new arena's index is one less. */
	narenas = ctl_stats.narenas - 1;
	READ(narenas, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1990 
1991 /******************************************************************************/
1992 
1993 static int
1994 prof_thread_active_init_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
1995     void *oldp, size_t *oldlenp, void *newp, size_t newlen)
1996 {
1997 	int ret;
1998 	bool oldval;
1999 
2000 	if (!config_prof)
2001 		return (ENOENT);
2002 
2003 	if (newp != NULL) {
2004 		if (newlen != sizeof(bool)) {
2005 			ret = EINVAL;
2006 			goto label_return;
2007 		}
2008 		oldval = prof_thread_active_init_set(tsd_tsdn(tsd),
2009 		    *(bool *)newp);
2010 	} else
2011 		oldval = prof_thread_active_init_get(tsd_tsdn(tsd));
2012 	READ(oldval, bool);
2013 
2014 	ret = 0;
2015 label_return:
2016 	return (ret);
2017 }
2018 
2019 static int
2020 prof_active_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
2021     size_t *oldlenp, void *newp, size_t newlen)
2022 {
2023 	int ret;
2024 	bool oldval;
2025 
2026 	if (!config_prof)
2027 		return (ENOENT);
2028 
2029 	if (newp != NULL) {
2030 		if (newlen != sizeof(bool)) {
2031 			ret = EINVAL;
2032 			goto label_return;
2033 		}
2034 		oldval = prof_active_set(tsd_tsdn(tsd), *(bool *)newp);
2035 	} else
2036 		oldval = prof_active_get(tsd_tsdn(tsd));
2037 	READ(oldval, bool);
2038 
2039 	ret = 0;
2040 label_return:
2041 	return (ret);
2042 }
2043 
2044 static int
2045 prof_dump_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
2046     size_t *oldlenp, void *newp, size_t newlen)
2047 {
2048 	int ret;
2049 	const char *filename = NULL;
2050 
2051 	if (!config_prof)
2052 		return (ENOENT);
2053 
2054 	WRITEONLY();
2055 	WRITE(filename, const char *);
2056 
2057 	if (prof_mdump(tsd, filename)) {
2058 		ret = EFAULT;
2059 		goto label_return;
2060 	}
2061 
2062 	ret = 0;
2063 label_return:
2064 	return (ret);
2065 }
2066 
2067 static int
2068 prof_gdump_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
2069     size_t *oldlenp, void *newp, size_t newlen)
2070 {
2071 	int ret;
2072 	bool oldval;
2073 
2074 	if (!config_prof)
2075 		return (ENOENT);
2076 
2077 	if (newp != NULL) {
2078 		if (newlen != sizeof(bool)) {
2079 			ret = EINVAL;
2080 			goto label_return;
2081 		}
2082 		oldval = prof_gdump_set(tsd_tsdn(tsd), *(bool *)newp);
2083 	} else
2084 		oldval = prof_gdump_get(tsd_tsdn(tsd));
2085 	READ(oldval, bool);
2086 
2087 	ret = 0;
2088 label_return:
2089 	return (ret);
2090 }
2091 
2092 static int
2093 prof_reset_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
2094     size_t *oldlenp, void *newp, size_t newlen)
2095 {
2096 	int ret;
2097 	size_t lg_sample = lg_prof_sample;
2098 
2099 	if (!config_prof)
2100 		return (ENOENT);
2101 
2102 	WRITEONLY();
2103 	WRITE(lg_sample, size_t);
2104 	if (lg_sample >= (sizeof(uint64_t) << 3))
2105 		lg_sample = (sizeof(uint64_t) << 3) - 1;
2106 
2107 	prof_reset(tsd, lg_sample);
2108 
2109 	ret = 0;
2110 label_return:
2111 	return (ret);
2112 }
2113 
/*
 * Read-only prof_ globals ("NL" presumably means no ctl_mtx lock is taken --
 * TODO confirm against the macro definition); compiled in only with
 * config_prof.
 */
CTL_RO_NL_CGEN(config_prof, prof_interval, prof_interval, uint64_t)
CTL_RO_NL_CGEN(config_prof, lg_prof_sample, lg_prof_sample, size_t)
2116 
2117 /******************************************************************************/
2118 
/*
 * Global allocation statistics.  Each CTL_RO_*GEN invocation expands to a
 * read-only mallctl handler; CTL_RO_CGEN handlers are compiled in only when
 * their config predicate (config_stats, config_tcache, ...) is true.  The
 * values come from the ctl_stats snapshot, which per the header comment is
 * protected by ctl_mtx and refreshed on "epoch" updates.
 */
CTL_RO_CGEN(config_stats, stats_cactive, &stats_cactive, size_t *)
CTL_RO_CGEN(config_stats, stats_allocated, ctl_stats.allocated, size_t)
CTL_RO_CGEN(config_stats, stats_active, ctl_stats.active, size_t)
CTL_RO_CGEN(config_stats, stats_metadata, ctl_stats.metadata, size_t)
CTL_RO_CGEN(config_stats, stats_resident, ctl_stats.resident, size_t)
CTL_RO_CGEN(config_stats, stats_mapped, ctl_stats.mapped, size_t)
CTL_RO_CGEN(config_stats, stats_retained, ctl_stats.retained, size_t)

/* Per-arena statistics; mib[2] selects the arena index. */
CTL_RO_GEN(stats_arenas_i_dss, ctl_stats.arenas[mib[2]].dss, const char *)
CTL_RO_GEN(stats_arenas_i_lg_dirty_mult, ctl_stats.arenas[mib[2]].lg_dirty_mult,
    ssize_t)
CTL_RO_GEN(stats_arenas_i_decay_time, ctl_stats.arenas[mib[2]].decay_time,
    ssize_t)
CTL_RO_GEN(stats_arenas_i_nthreads, ctl_stats.arenas[mib[2]].nthreads, unsigned)
CTL_RO_GEN(stats_arenas_i_pactive, ctl_stats.arenas[mib[2]].pactive, size_t)
CTL_RO_GEN(stats_arenas_i_pdirty, ctl_stats.arenas[mib[2]].pdirty, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_mapped,
    ctl_stats.arenas[mib[2]].astats.mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_retained,
    ctl_stats.arenas[mib[2]].astats.retained, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_npurge,
    ctl_stats.arenas[mib[2]].astats.npurge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_nmadvise,
    ctl_stats.arenas[mib[2]].astats.nmadvise, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_purged,
    ctl_stats.arenas[mib[2]].astats.purged, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_mapped,
    ctl_stats.arenas[mib[2]].astats.metadata_mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_allocated,
    ctl_stats.arenas[mib[2]].astats.metadata_allocated, size_t)

/* Per-arena small/large/huge size-class aggregates. */
CTL_RO_CGEN(config_stats, stats_arenas_i_small_allocated,
    ctl_stats.arenas[mib[2]].allocated_small, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nmalloc,
    ctl_stats.arenas[mib[2]].nmalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_ndalloc,
    ctl_stats.arenas[mib[2]].ndalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nrequests,
    ctl_stats.arenas[mib[2]].nrequests_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_allocated,
    ctl_stats.arenas[mib[2]].astats.allocated_large, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nmalloc,
    ctl_stats.arenas[mib[2]].astats.nmalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_ndalloc,
    ctl_stats.arenas[mib[2]].astats.ndalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nrequests,
    ctl_stats.arenas[mib[2]].astats.nrequests_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_allocated,
    ctl_stats.arenas[mib[2]].astats.allocated_huge, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_nmalloc,
    ctl_stats.arenas[mib[2]].astats.nmalloc_huge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_ndalloc,
    ctl_stats.arenas[mib[2]].astats.ndalloc_huge, uint64_t)
/*
 * Intentionally aliased to nmalloc_huge: each huge request presumably maps
 * 1:1 to an allocation, so nrequests == nmalloc -- confirm against
 * arena_stats accounting.
 */
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_nrequests,
    ctl_stats.arenas[mib[2]].astats.nmalloc_huge, uint64_t) /* Intentional. */

/* Per-arena per-bin statistics; mib[4] selects the bin (small size class). */
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nmalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_ndalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nrequests,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curregs,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].curregs, size_t)
/* Fill/flush counters only exist when the thread cache is compiled in. */
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nfills,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nfills, uint64_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nflushes,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nflushes, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nreruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].reruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].curruns, size_t)
2193 
2194 static const ctl_named_node_t *
2195 stats_arenas_i_bins_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen,
2196     size_t j)
2197 {
2198 
2199 	if (j > NBINS)
2200 		return (NULL);
2201 	return (super_stats_arenas_i_bins_j_node);
2202 }
2203 
/* Per-arena large-run statistics; mib[4] selects the large size class. */
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nmalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_ndalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nrequests,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_curruns,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].curruns, size_t)
2212 
2213 static const ctl_named_node_t *
2214 stats_arenas_i_lruns_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen,
2215     size_t j)
2216 {
2217 
2218 	if (j > nlclasses)
2219 		return (NULL);
2220 	return (super_stats_arenas_i_lruns_j_node);
2221 }
2222 
/* Per-arena huge-chunk statistics; mib[4] selects the huge size class. */
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_nmalloc,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_ndalloc,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].ndalloc, uint64_t)
/*
 * nrequests intentionally aliases nmalloc: each huge request presumably maps
 * 1:1 to an allocation -- confirm against arena_stats accounting.
 */
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_nrequests,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].nmalloc, /* Intentional. */
    uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_curhchunks,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].curhchunks, size_t)
2232 
2233 static const ctl_named_node_t *
2234 stats_arenas_i_hchunks_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen,
2235     size_t j)
2236 {
2237 
2238 	if (j > nhclasses)
2239 		return (NULL);
2240 	return (super_stats_arenas_i_hchunks_j_node);
2241 }
2242 
2243 static const ctl_named_node_t *
2244 stats_arenas_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
2245 {
2246 	const ctl_named_node_t * ret;
2247 
2248 	malloc_mutex_lock(tsdn, &ctl_mtx);
2249 	if (i > ctl_stats.narenas || !ctl_stats.arenas[i].initialized) {
2250 		ret = NULL;
2251 		goto label_return;
2252 	}
2253 
2254 	ret = super_stats_arenas_i_node;
2255 label_return:
2256 	malloc_mutex_unlock(tsdn, &ctl_mtx);
2257 	return (ret);
2258 }
2259