#define	JEMALLOC_CTL_C_
#include "jemalloc/internal/jemalloc_internal.h"

/******************************************************************************/
/* Data. */

/*
 * ctl_mtx protects the following:
 * - ctl_stats.*
 */
static malloc_mutex_t	ctl_mtx;
static bool		ctl_initialized;
static uint64_t		ctl_epoch;
static ctl_stats_t	ctl_stats;

/******************************************************************************/
/* Helpers for named and indexed nodes. */

JEMALLOC_INLINE_C const ctl_named_node_t *
ctl_named_node(const ctl_node_t *node)
{

	return ((node->named) ? (const ctl_named_node_t *)node : NULL);
}

JEMALLOC_INLINE_C const ctl_named_node_t *
ctl_named_children(const ctl_named_node_t *node, int index)
{
	const ctl_named_node_t *children = ctl_named_node(node->children);

	return (children ? &children[index] : NULL);
}

JEMALLOC_INLINE_C const ctl_indexed_node_t *
ctl_indexed_node(const ctl_node_t *node)
{

	return (!node->named ? (const ctl_indexed_node_t *)node : NULL);
}

/******************************************************************************/
/* Function prototypes for non-inline static functions. */

#define	CTL_PROTO(n)							\
static int	n##_ctl(const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen);

#define	INDEX_PROTO(n)							\
static const ctl_named_node_t	*n##_index(const size_t *mib,		\
    size_t miblen, size_t i);

static bool	ctl_arena_init(ctl_arena_stats_t *astats);
static void	ctl_arena_clear(ctl_arena_stats_t *astats);
static void	ctl_arena_stats_amerge(ctl_arena_stats_t *cstats,
    arena_t *arena);
static void	ctl_arena_stats_smerge(ctl_arena_stats_t *sstats,
    ctl_arena_stats_t *astats);
static void	ctl_arena_refresh(arena_t *arena, unsigned i);
static bool	ctl_grow(void);
static void	ctl_refresh(void);
static bool	ctl_init(void);
static int	ctl_lookup(const char *name, ctl_node_t const **nodesp,
    size_t *mibp, size_t *depthp);

CTL_PROTO(version)
CTL_PROTO(epoch)
CTL_PROTO(thread_tcache_enabled)
CTL_PROTO(thread_tcache_flush)
CTL_PROTO(thread_prof_name)
CTL_PROTO(thread_prof_active)
CTL_PROTO(thread_arena)
CTL_PROTO(thread_allocated)
CTL_PROTO(thread_allocatedp)
CTL_PROTO(thread_deallocated)
CTL_PROTO(thread_deallocatedp)
CTL_PROTO(config_cache_oblivious)
CTL_PROTO(config_debug)
CTL_PROTO(config_fill)
CTL_PROTO(config_lazy_lock)
CTL_PROTO(config_munmap)
CTL_PROTO(config_prof)
CTL_PROTO(config_prof_libgcc)
CTL_PROTO(config_prof_libunwind)
CTL_PROTO(config_stats)
CTL_PROTO(config_tcache)
CTL_PROTO(config_tls)
CTL_PROTO(config_utrace)
CTL_PROTO(config_valgrind)
CTL_PROTO(config_xmalloc)
CTL_PROTO(opt_abort)
CTL_PROTO(opt_dss)
CTL_PROTO(opt_lg_chunk)
CTL_PROTO(opt_narenas)
CTL_PROTO(opt_lg_dirty_mult)
CTL_PROTO(opt_stats_print)
CTL_PROTO(opt_junk)
CTL_PROTO(opt_zero)
CTL_PROTO(opt_quarantine)
CTL_PROTO(opt_redzone)
CTL_PROTO(opt_utrace)
CTL_PROTO(opt_xmalloc)
CTL_PROTO(opt_tcache)
CTL_PROTO(opt_lg_tcache_max)
CTL_PROTO(opt_prof)
CTL_PROTO(opt_prof_prefix)
CTL_PROTO(opt_prof_active)
CTL_PROTO(opt_prof_thread_active_init)
CTL_PROTO(opt_lg_prof_sample)
CTL_PROTO(opt_lg_prof_interval)
CTL_PROTO(opt_prof_gdump)
CTL_PROTO(opt_prof_final)
CTL_PROTO(opt_prof_leak)
CTL_PROTO(opt_prof_accum)
CTL_PROTO(tcache_create)
CTL_PROTO(tcache_flush)
CTL_PROTO(tcache_destroy)
CTL_PROTO(arena_i_purge)
static void	arena_purge(unsigned arena_ind);
CTL_PROTO(arena_i_dss)
CTL_PROTO(arena_i_lg_dirty_mult)
CTL_PROTO(arena_i_chunk_hooks)
INDEX_PROTO(arena_i)
CTL_PROTO(arenas_bin_i_size)
CTL_PROTO(arenas_bin_i_nregs)
CTL_PROTO(arenas_bin_i_run_size)
INDEX_PROTO(arenas_bin_i)
CTL_PROTO(arenas_lrun_i_size)
INDEX_PROTO(arenas_lrun_i)
CTL_PROTO(arenas_hchunk_i_size)
INDEX_PROTO(arenas_hchunk_i)
CTL_PROTO(arenas_narenas)
CTL_PROTO(arenas_initialized)
CTL_PROTO(arenas_lg_dirty_mult)
CTL_PROTO(arenas_quantum)
CTL_PROTO(arenas_page)
CTL_PROTO(arenas_tcache_max)
CTL_PROTO(arenas_nbins)
CTL_PROTO(arenas_nhbins)
CTL_PROTO(arenas_nlruns)
CTL_PROTO(arenas_nhchunks)
CTL_PROTO(arenas_extend)
CTL_PROTO(prof_thread_active_init)
CTL_PROTO(prof_active)
CTL_PROTO(prof_dump)
CTL_PROTO(prof_gdump)
CTL_PROTO(prof_reset)
CTL_PROTO(prof_interval)
CTL_PROTO(lg_prof_sample)
CTL_PROTO(stats_arenas_i_small_allocated)
CTL_PROTO(stats_arenas_i_small_nmalloc)
CTL_PROTO(stats_arenas_i_small_ndalloc)
CTL_PROTO(stats_arenas_i_small_nrequests)
CTL_PROTO(stats_arenas_i_large_allocated)
CTL_PROTO(stats_arenas_i_large_nmalloc)
CTL_PROTO(stats_arenas_i_large_ndalloc)
CTL_PROTO(stats_arenas_i_large_nrequests)
CTL_PROTO(stats_arenas_i_huge_allocated)
CTL_PROTO(stats_arenas_i_huge_nmalloc)
CTL_PROTO(stats_arenas_i_huge_ndalloc)
CTL_PROTO(stats_arenas_i_huge_nrequests)
CTL_PROTO(stats_arenas_i_bins_j_nmalloc)
CTL_PROTO(stats_arenas_i_bins_j_ndalloc)
CTL_PROTO(stats_arenas_i_bins_j_nrequests)
CTL_PROTO(stats_arenas_i_bins_j_curregs)
CTL_PROTO(stats_arenas_i_bins_j_nfills)
CTL_PROTO(stats_arenas_i_bins_j_nflushes)
CTL_PROTO(stats_arenas_i_bins_j_nruns)
CTL_PROTO(stats_arenas_i_bins_j_nreruns)
CTL_PROTO(stats_arenas_i_bins_j_curruns)
INDEX_PROTO(stats_arenas_i_bins_j)
CTL_PROTO(stats_arenas_i_lruns_j_nmalloc)
CTL_PROTO(stats_arenas_i_lruns_j_ndalloc)
CTL_PROTO(stats_arenas_i_lruns_j_nrequests)
CTL_PROTO(stats_arenas_i_lruns_j_curruns)
INDEX_PROTO(stats_arenas_i_lruns_j)
CTL_PROTO(stats_arenas_i_hchunks_j_nmalloc)
CTL_PROTO(stats_arenas_i_hchunks_j_ndalloc)
CTL_PROTO(stats_arenas_i_hchunks_j_nrequests)
CTL_PROTO(stats_arenas_i_hchunks_j_curhchunks)
INDEX_PROTO(stats_arenas_i_hchunks_j)
CTL_PROTO(stats_arenas_i_nthreads)
CTL_PROTO(stats_arenas_i_dss)
CTL_PROTO(stats_arenas_i_lg_dirty_mult)
CTL_PROTO(stats_arenas_i_pactive)
CTL_PROTO(stats_arenas_i_pdirty)
CTL_PROTO(stats_arenas_i_mapped)
CTL_PROTO(stats_arenas_i_npurge)
CTL_PROTO(stats_arenas_i_nmadvise)
CTL_PROTO(stats_arenas_i_purged)
CTL_PROTO(stats_arenas_i_metadata_mapped)
CTL_PROTO(stats_arenas_i_metadata_allocated)
INDEX_PROTO(stats_arenas_i)
CTL_PROTO(stats_cactive)
CTL_PROTO(stats_allocated)
CTL_PROTO(stats_active)
CTL_PROTO(stats_metadata)
CTL_PROTO(stats_resident)
CTL_PROTO(stats_mapped)

/******************************************************************************/
/* mallctl tree. */

/* Maximum tree depth. */
#define	CTL_MAX_DEPTH	6

#define	NAME(n)	{true},	n
#define	CHILD(t, c)							\
	sizeof(c##_node) / sizeof(ctl_##t##_node_t),			\
	(ctl_node_t *)c##_node,						\
	NULL
#define	CTL(c)	0, NULL, c##_ctl

/*
 * Only handles internal indexed nodes, since there are currently no external
 * ones.
 */
#define	INDEX(i)	{false},	i##_index
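
/*
 * Illustrative expansion (a sketch, not generated output): given the macros
 * above, a table entry such as
 *
 *	{NAME("version"),	CTL(version)}
 *
 * expands to a ctl_named_node_t initializer of the form
 *
 *	{{true}, "version", 0, NULL, version_ctl}
 *
 * i.e. a named leaf with no children and version_ctl() as its handler,
 * whereas CHILD(named, thread) fills in nchildren/children and leaves the
 * ctl function NULL.
 */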

static const ctl_named_node_t	thread_tcache_node[] = {
	{NAME("enabled"),	CTL(thread_tcache_enabled)},
	{NAME("flush"),		CTL(thread_tcache_flush)}
};

static const ctl_named_node_t	thread_prof_node[] = {
	{NAME("name"),		CTL(thread_prof_name)},
	{NAME("active"),	CTL(thread_prof_active)}
};

static const ctl_named_node_t	thread_node[] = {
	{NAME("arena"),		CTL(thread_arena)},
	{NAME("allocated"),	CTL(thread_allocated)},
	{NAME("allocatedp"),	CTL(thread_allocatedp)},
	{NAME("deallocated"),	CTL(thread_deallocated)},
	{NAME("deallocatedp"),	CTL(thread_deallocatedp)},
	{NAME("tcache"),	CHILD(named, thread_tcache)},
	{NAME("prof"),		CHILD(named, thread_prof)}
};

static const ctl_named_node_t	config_node[] = {
	{NAME("cache_oblivious"), CTL(config_cache_oblivious)},
	{NAME("debug"),		CTL(config_debug)},
	{NAME("fill"),		CTL(config_fill)},
	{NAME("lazy_lock"),	CTL(config_lazy_lock)},
	{NAME("munmap"),	CTL(config_munmap)},
	{NAME("prof"),		CTL(config_prof)},
	{NAME("prof_libgcc"),	CTL(config_prof_libgcc)},
	{NAME("prof_libunwind"), CTL(config_prof_libunwind)},
	{NAME("stats"),		CTL(config_stats)},
	{NAME("tcache"),	CTL(config_tcache)},
	{NAME("tls"),		CTL(config_tls)},
	{NAME("utrace"),	CTL(config_utrace)},
	{NAME("valgrind"),	CTL(config_valgrind)},
	{NAME("xmalloc"),	CTL(config_xmalloc)}
};

static const ctl_named_node_t opt_node[] = {
	{NAME("abort"),		CTL(opt_abort)},
	{NAME("dss"),		CTL(opt_dss)},
	{NAME("lg_chunk"),	CTL(opt_lg_chunk)},
	{NAME("narenas"),	CTL(opt_narenas)},
	{NAME("lg_dirty_mult"),	CTL(opt_lg_dirty_mult)},
	{NAME("stats_print"),	CTL(opt_stats_print)},
	{NAME("junk"),		CTL(opt_junk)},
	{NAME("zero"),		CTL(opt_zero)},
	{NAME("quarantine"),	CTL(opt_quarantine)},
	{NAME("redzone"),	CTL(opt_redzone)},
	{NAME("utrace"),	CTL(opt_utrace)},
	{NAME("xmalloc"),	CTL(opt_xmalloc)},
	{NAME("tcache"),	CTL(opt_tcache)},
	{NAME("lg_tcache_max"),	CTL(opt_lg_tcache_max)},
	{NAME("prof"),		CTL(opt_prof)},
	{NAME("prof_prefix"),	CTL(opt_prof_prefix)},
	{NAME("prof_active"),	CTL(opt_prof_active)},
	{NAME("prof_thread_active_init"), CTL(opt_prof_thread_active_init)},
	{NAME("lg_prof_sample"), CTL(opt_lg_prof_sample)},
	{NAME("lg_prof_interval"), CTL(opt_lg_prof_interval)},
	{NAME("prof_gdump"),	CTL(opt_prof_gdump)},
	{NAME("prof_final"),	CTL(opt_prof_final)},
	{NAME("prof_leak"),	CTL(opt_prof_leak)},
	{NAME("prof_accum"),	CTL(opt_prof_accum)}
};

static const ctl_named_node_t	tcache_node[] = {
	{NAME("create"),	CTL(tcache_create)},
	{NAME("flush"),		CTL(tcache_flush)},
	{NAME("destroy"),	CTL(tcache_destroy)}
};

static const ctl_named_node_t arena_i_node[] = {
	{NAME("purge"),		CTL(arena_i_purge)},
	{NAME("dss"),		CTL(arena_i_dss)},
	{NAME("lg_dirty_mult"),	CTL(arena_i_lg_dirty_mult)},
	{NAME("chunk_hooks"),	CTL(arena_i_chunk_hooks)}
};
static const ctl_named_node_t super_arena_i_node[] = {
	{NAME(""),		CHILD(named, arena_i)}
};

static const ctl_indexed_node_t arena_node[] = {
	{INDEX(arena_i)}
};

static const ctl_named_node_t arenas_bin_i_node[] = {
	{NAME("size"),		CTL(arenas_bin_i_size)},
	{NAME("nregs"),		CTL(arenas_bin_i_nregs)},
	{NAME("run_size"),	CTL(arenas_bin_i_run_size)}
};
static const ctl_named_node_t super_arenas_bin_i_node[] = {
	{NAME(""),		CHILD(named, arenas_bin_i)}
};

static const ctl_indexed_node_t arenas_bin_node[] = {
	{INDEX(arenas_bin_i)}
};

static const ctl_named_node_t arenas_lrun_i_node[] = {
	{NAME("size"),		CTL(arenas_lrun_i_size)}
};
static const ctl_named_node_t super_arenas_lrun_i_node[] = {
	{NAME(""),		CHILD(named, arenas_lrun_i)}
};

static const ctl_indexed_node_t arenas_lrun_node[] = {
	{INDEX(arenas_lrun_i)}
};

static const ctl_named_node_t arenas_hchunk_i_node[] = {
	{NAME("size"),		CTL(arenas_hchunk_i_size)}
};
static const ctl_named_node_t super_arenas_hchunk_i_node[] = {
	{NAME(""),		CHILD(named, arenas_hchunk_i)}
};

static const ctl_indexed_node_t arenas_hchunk_node[] = {
	{INDEX(arenas_hchunk_i)}
};

static const ctl_named_node_t arenas_node[] = {
	{NAME("narenas"),	CTL(arenas_narenas)},
	{NAME("initialized"),	CTL(arenas_initialized)},
	{NAME("lg_dirty_mult"),	CTL(arenas_lg_dirty_mult)},
	{NAME("quantum"),	CTL(arenas_quantum)},
	{NAME("page"),		CTL(arenas_page)},
	{NAME("tcache_max"),	CTL(arenas_tcache_max)},
	{NAME("nbins"),		CTL(arenas_nbins)},
	{NAME("nhbins"),	CTL(arenas_nhbins)},
	{NAME("bin"),		CHILD(indexed, arenas_bin)},
	{NAME("nlruns"),	CTL(arenas_nlruns)},
	{NAME("lrun"),		CHILD(indexed, arenas_lrun)},
	{NAME("nhchunks"),	CTL(arenas_nhchunks)},
	{NAME("hchunk"),	CHILD(indexed, arenas_hchunk)},
	{NAME("extend"),	CTL(arenas_extend)}
};

static const ctl_named_node_t	prof_node[] = {
	{NAME("thread_active_init"), CTL(prof_thread_active_init)},
	{NAME("active"),	CTL(prof_active)},
	{NAME("dump"),		CTL(prof_dump)},
	{NAME("gdump"),		CTL(prof_gdump)},
	{NAME("reset"),		CTL(prof_reset)},
	{NAME("interval"),	CTL(prof_interval)},
	{NAME("lg_sample"),	CTL(lg_prof_sample)}
};

static const ctl_named_node_t stats_arenas_i_metadata_node[] = {
	{NAME("mapped"),	CTL(stats_arenas_i_metadata_mapped)},
	{NAME("allocated"),	CTL(stats_arenas_i_metadata_allocated)}
};

static const ctl_named_node_t stats_arenas_i_small_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_small_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_small_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_small_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_small_nrequests)}
};

static const ctl_named_node_t stats_arenas_i_large_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_large_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_large_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_large_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_large_nrequests)}
};

static const ctl_named_node_t stats_arenas_i_huge_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_huge_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_huge_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_huge_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_huge_nrequests)}
};

static const ctl_named_node_t stats_arenas_i_bins_j_node[] = {
	{NAME("nmalloc"),	CTL(stats_arenas_i_bins_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_bins_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_bins_j_nrequests)},
	{NAME("curregs"),	CTL(stats_arenas_i_bins_j_curregs)},
	{NAME("nfills"),	CTL(stats_arenas_i_bins_j_nfills)},
	{NAME("nflushes"),	CTL(stats_arenas_i_bins_j_nflushes)},
	{NAME("nruns"),		CTL(stats_arenas_i_bins_j_nruns)},
	{NAME("nreruns"),	CTL(stats_arenas_i_bins_j_nreruns)},
	{NAME("curruns"),	CTL(stats_arenas_i_bins_j_curruns)}
};
static const ctl_named_node_t super_stats_arenas_i_bins_j_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i_bins_j)}
};

static const ctl_indexed_node_t stats_arenas_i_bins_node[] = {
	{INDEX(stats_arenas_i_bins_j)}
};

static const ctl_named_node_t stats_arenas_i_lruns_j_node[] = {
	{NAME("nmalloc"),	CTL(stats_arenas_i_lruns_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_lruns_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_lruns_j_nrequests)},
	{NAME("curruns"),	CTL(stats_arenas_i_lruns_j_curruns)}
};
static const ctl_named_node_t super_stats_arenas_i_lruns_j_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i_lruns_j)}
};

static const ctl_indexed_node_t stats_arenas_i_lruns_node[] = {
	{INDEX(stats_arenas_i_lruns_j)}
};

static const ctl_named_node_t stats_arenas_i_hchunks_j_node[] = {
	{NAME("nmalloc"),	CTL(stats_arenas_i_hchunks_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_hchunks_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_hchunks_j_nrequests)},
	{NAME("curhchunks"),	CTL(stats_arenas_i_hchunks_j_curhchunks)}
};
static const ctl_named_node_t super_stats_arenas_i_hchunks_j_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i_hchunks_j)}
};

static const ctl_indexed_node_t stats_arenas_i_hchunks_node[] = {
	{INDEX(stats_arenas_i_hchunks_j)}
};

static const ctl_named_node_t stats_arenas_i_node[] = {
	{NAME("nthreads"),	CTL(stats_arenas_i_nthreads)},
	{NAME("dss"),		CTL(stats_arenas_i_dss)},
	{NAME("lg_dirty_mult"),	CTL(stats_arenas_i_lg_dirty_mult)},
	{NAME("pactive"),	CTL(stats_arenas_i_pactive)},
	{NAME("pdirty"),	CTL(stats_arenas_i_pdirty)},
	{NAME("mapped"),	CTL(stats_arenas_i_mapped)},
	{NAME("npurge"),	CTL(stats_arenas_i_npurge)},
	{NAME("nmadvise"),	CTL(stats_arenas_i_nmadvise)},
	{NAME("purged"),	CTL(stats_arenas_i_purged)},
	{NAME("metadata"),	CHILD(named, stats_arenas_i_metadata)},
	{NAME("small"),		CHILD(named, stats_arenas_i_small)},
	{NAME("large"),		CHILD(named, stats_arenas_i_large)},
	{NAME("huge"),		CHILD(named, stats_arenas_i_huge)},
	{NAME("bins"),		CHILD(indexed, stats_arenas_i_bins)},
	{NAME("lruns"),		CHILD(indexed, stats_arenas_i_lruns)},
	{NAME("hchunks"),	CHILD(indexed, stats_arenas_i_hchunks)}
};
static const ctl_named_node_t super_stats_arenas_i_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i)}
};

static const ctl_indexed_node_t stats_arenas_node[] = {
	{INDEX(stats_arenas_i)}
};

static const ctl_named_node_t stats_node[] = {
	{NAME("cactive"),	CTL(stats_cactive)},
	{NAME("allocated"),	CTL(stats_allocated)},
	{NAME("active"),	CTL(stats_active)},
	{NAME("metadata"),	CTL(stats_metadata)},
	{NAME("resident"),	CTL(stats_resident)},
	{NAME("mapped"),	CTL(stats_mapped)},
	{NAME("arenas"),	CHILD(indexed, stats_arenas)}
};

static const ctl_named_node_t	root_node[] = {
	{NAME("version"),	CTL(version)},
	{NAME("epoch"),		CTL(epoch)},
	{NAME("thread"),	CHILD(named, thread)},
	{NAME("config"),	CHILD(named, config)},
	{NAME("opt"),		CHILD(named, opt)},
	{NAME("tcache"),	CHILD(named, tcache)},
	{NAME("arena"),		CHILD(indexed, arena)},
	{NAME("arenas"),	CHILD(named, arenas)},
	{NAME("prof"),		CHILD(named, prof)},
	{NAME("stats"),		CHILD(named, stats)}
};
static const ctl_named_node_t super_root_node[] = {
	{NAME(""),		CHILD(named, root)}
};

#undef NAME
#undef CHILD
#undef CTL
#undef INDEX
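
/*
 * Example traversal (illustrative): the name "stats.arenas.0.pdirty"
 * resolves against super_root_node as
 *
 *	root -> stats (named) -> arenas (indexed, index 0) -> pdirty
 *
 * yielding a MIB whose first element is the position of "stats" in
 * root_node, whose third element is the arena index 0, and whose final
 * element selects stats_arenas_i_pdirty_ctl().
 */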

/******************************************************************************/

static bool
ctl_arena_init(ctl_arena_stats_t *astats)
{

	if (astats->lstats == NULL) {
		astats->lstats = (malloc_large_stats_t *)a0malloc(nlclasses *
		    sizeof(malloc_large_stats_t));
		if (astats->lstats == NULL)
			return (true);
	}

	if (astats->hstats == NULL) {
		astats->hstats = (malloc_huge_stats_t *)a0malloc(nhclasses *
		    sizeof(malloc_huge_stats_t));
		if (astats->hstats == NULL)
			return (true);
	}

	return (false);
}

static void
ctl_arena_clear(ctl_arena_stats_t *astats)
{

	astats->dss = dss_prec_names[dss_prec_limit];
	astats->lg_dirty_mult = -1;
	astats->pactive = 0;
	astats->pdirty = 0;
	if (config_stats) {
		memset(&astats->astats, 0, sizeof(arena_stats_t));
		astats->allocated_small = 0;
		astats->nmalloc_small = 0;
		astats->ndalloc_small = 0;
		astats->nrequests_small = 0;
		memset(astats->bstats, 0, NBINS * sizeof(malloc_bin_stats_t));
		memset(astats->lstats, 0, nlclasses *
		    sizeof(malloc_large_stats_t));
		memset(astats->hstats, 0, nhclasses *
		    sizeof(malloc_huge_stats_t));
	}
}

static void
ctl_arena_stats_amerge(ctl_arena_stats_t *cstats, arena_t *arena)
{
	unsigned i;

	arena_stats_merge(arena, &cstats->dss, &cstats->lg_dirty_mult,
	    &cstats->pactive, &cstats->pdirty, &cstats->astats, cstats->bstats,
	    cstats->lstats, cstats->hstats);

	for (i = 0; i < NBINS; i++) {
		cstats->allocated_small += cstats->bstats[i].curregs *
		    index2size(i);
		cstats->nmalloc_small += cstats->bstats[i].nmalloc;
		cstats->ndalloc_small += cstats->bstats[i].ndalloc;
		cstats->nrequests_small += cstats->bstats[i].nrequests;
	}
}

static void
ctl_arena_stats_smerge(ctl_arena_stats_t *sstats, ctl_arena_stats_t *astats)
{
	unsigned i;

	sstats->pactive += astats->pactive;
	sstats->pdirty += astats->pdirty;

	sstats->astats.mapped += astats->astats.mapped;
	sstats->astats.npurge += astats->astats.npurge;
	sstats->astats.nmadvise += astats->astats.nmadvise;
	sstats->astats.purged += astats->astats.purged;

	sstats->astats.metadata_mapped += astats->astats.metadata_mapped;
	sstats->astats.metadata_allocated += astats->astats.metadata_allocated;

	sstats->allocated_small += astats->allocated_small;
	sstats->nmalloc_small += astats->nmalloc_small;
	sstats->ndalloc_small += astats->ndalloc_small;
	sstats->nrequests_small += astats->nrequests_small;

	sstats->astats.allocated_large += astats->astats.allocated_large;
	sstats->astats.nmalloc_large += astats->astats.nmalloc_large;
	sstats->astats.ndalloc_large += astats->astats.ndalloc_large;
	sstats->astats.nrequests_large += astats->astats.nrequests_large;

	sstats->astats.allocated_huge += astats->astats.allocated_huge;
	sstats->astats.nmalloc_huge += astats->astats.nmalloc_huge;
	sstats->astats.ndalloc_huge += astats->astats.ndalloc_huge;

	for (i = 0; i < NBINS; i++) {
		sstats->bstats[i].nmalloc += astats->bstats[i].nmalloc;
		sstats->bstats[i].ndalloc += astats->bstats[i].ndalloc;
		sstats->bstats[i].nrequests += astats->bstats[i].nrequests;
		sstats->bstats[i].curregs += astats->bstats[i].curregs;
		if (config_tcache) {
			sstats->bstats[i].nfills += astats->bstats[i].nfills;
			sstats->bstats[i].nflushes +=
			    astats->bstats[i].nflushes;
		}
		sstats->bstats[i].nruns += astats->bstats[i].nruns;
		sstats->bstats[i].reruns += astats->bstats[i].reruns;
		sstats->bstats[i].curruns += astats->bstats[i].curruns;
	}

	for (i = 0; i < nlclasses; i++) {
		sstats->lstats[i].nmalloc += astats->lstats[i].nmalloc;
		sstats->lstats[i].ndalloc += astats->lstats[i].ndalloc;
		sstats->lstats[i].nrequests += astats->lstats[i].nrequests;
		sstats->lstats[i].curruns += astats->lstats[i].curruns;
	}

	for (i = 0; i < nhclasses; i++) {
		sstats->hstats[i].nmalloc += astats->hstats[i].nmalloc;
		sstats->hstats[i].ndalloc += astats->hstats[i].ndalloc;
		sstats->hstats[i].curhchunks += astats->hstats[i].curhchunks;
	}
}

static void
ctl_arena_refresh(arena_t *arena, unsigned i)
{
	ctl_arena_stats_t *astats = &ctl_stats.arenas[i];
	ctl_arena_stats_t *sstats = &ctl_stats.arenas[ctl_stats.narenas];

	ctl_arena_clear(astats);

	sstats->nthreads += astats->nthreads;
	if (config_stats) {
		ctl_arena_stats_amerge(astats, arena);
		/* Merge into sum stats as well. */
		ctl_arena_stats_smerge(sstats, astats);
	} else {
		astats->pactive += arena->nactive;
		astats->pdirty += arena->ndirty;
		/* Merge into sum stats as well. */
		sstats->pactive += arena->nactive;
		sstats->pdirty += arena->ndirty;
	}
}

static bool
ctl_grow(void)
{
	ctl_arena_stats_t *astats;

	/* Initialize new arena. */
	if (arena_init(ctl_stats.narenas) == NULL)
		return (true);

	/* Allocate extended arena stats. */
	astats = (ctl_arena_stats_t *)a0malloc((ctl_stats.narenas + 2) *
	    sizeof(ctl_arena_stats_t));
	if (astats == NULL)
		return (true);

	/* Initialize the new astats element. */
	memcpy(astats, ctl_stats.arenas, (ctl_stats.narenas + 1) *
	    sizeof(ctl_arena_stats_t));
	memset(&astats[ctl_stats.narenas + 1], 0, sizeof(ctl_arena_stats_t));
	if (ctl_arena_init(&astats[ctl_stats.narenas + 1])) {
		a0dalloc(astats);
		return (true);
	}
	/* Swap merged stats to their new location. */
	{
		ctl_arena_stats_t tstats;
		memcpy(&tstats, &astats[ctl_stats.narenas],
		    sizeof(ctl_arena_stats_t));
		memcpy(&astats[ctl_stats.narenas],
		    &astats[ctl_stats.narenas + 1], sizeof(ctl_arena_stats_t));
		memcpy(&astats[ctl_stats.narenas + 1], &tstats,
		    sizeof(ctl_arena_stats_t));
	}
	a0dalloc(ctl_stats.arenas);
	ctl_stats.arenas = astats;
	ctl_stats.narenas++;

	return (false);
}
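
/*
 * Layout sketch for ctl_grow(): ctl_stats.arenas always keeps the summary
 * element last, so growth from n to n+1 arenas rearranges
 *
 *	[0 .. n-1][sum]		(n+1 elements)
 * into
 *	[0 .. n-1][n][sum]	(n+2 elements)
 *
 * by copying the old array, initializing one fresh element, and swapping it
 * with the old summary slot.
 */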

static void
ctl_refresh(void)
{
	tsd_t *tsd;
	unsigned i;
	bool refreshed;
	VARIABLE_ARRAY(arena_t *, tarenas, ctl_stats.narenas);

	/*
	 * Clear sum stats, since ctl_arena_refresh() will merge into them.
	 */
	ctl_stats.arenas[ctl_stats.narenas].nthreads = 0;
	ctl_arena_clear(&ctl_stats.arenas[ctl_stats.narenas]);

	tsd = tsd_fetch();
	for (i = 0, refreshed = false; i < ctl_stats.narenas; i++) {
		tarenas[i] = arena_get(tsd, i, false, false);
		if (tarenas[i] == NULL && !refreshed) {
			tarenas[i] = arena_get(tsd, i, false, true);
			refreshed = true;
		}
	}

	for (i = 0; i < ctl_stats.narenas; i++) {
		if (tarenas[i] != NULL)
			ctl_stats.arenas[i].nthreads = arena_nbound(i);
		else
			ctl_stats.arenas[i].nthreads = 0;
	}

	for (i = 0; i < ctl_stats.narenas; i++) {
		bool initialized = (tarenas[i] != NULL);

		ctl_stats.arenas[i].initialized = initialized;
		if (initialized)
			ctl_arena_refresh(tarenas[i], i);
	}

	if (config_stats) {
		size_t base_allocated, base_resident, base_mapped;
		base_stats_get(&base_allocated, &base_resident, &base_mapped);
		ctl_stats.allocated =
		    ctl_stats.arenas[ctl_stats.narenas].allocated_small +
		    ctl_stats.arenas[ctl_stats.narenas].astats.allocated_large +
		    ctl_stats.arenas[ctl_stats.narenas].astats.allocated_huge;
		ctl_stats.active =
		    (ctl_stats.arenas[ctl_stats.narenas].pactive << LG_PAGE);
		ctl_stats.metadata = base_allocated +
		    ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped +
		    ctl_stats.arenas[ctl_stats.narenas].astats
		    .metadata_allocated;
		ctl_stats.resident = base_resident +
		    ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped +
		    ((ctl_stats.arenas[ctl_stats.narenas].pactive +
		    ctl_stats.arenas[ctl_stats.narenas].pdirty) << LG_PAGE);
		ctl_stats.mapped = base_mapped +
		    ctl_stats.arenas[ctl_stats.narenas].astats.mapped;
	}

	ctl_epoch++;
}

static bool
ctl_init(void)
{
	bool ret;

	malloc_mutex_lock(&ctl_mtx);
	if (!ctl_initialized) {
		/*
		 * Allocate space for one extra arena stats element, which
		 * contains summed stats across all arenas.
		 */
		ctl_stats.narenas = narenas_total_get();
		ctl_stats.arenas = (ctl_arena_stats_t *)a0malloc(
		    (ctl_stats.narenas + 1) * sizeof(ctl_arena_stats_t));
		if (ctl_stats.arenas == NULL) {
			ret = true;
			goto label_return;
		}
		memset(ctl_stats.arenas, 0, (ctl_stats.narenas + 1) *
		    sizeof(ctl_arena_stats_t));

		/*
		 * Initialize all stats structures, regardless of whether they
		 * ever get used.  Lazy initialization would allow errors to
		 * cause inconsistent state to be viewable by the application.
		 */
		if (config_stats) {
			unsigned i;
			for (i = 0; i <= ctl_stats.narenas; i++) {
				if (ctl_arena_init(&ctl_stats.arenas[i])) {
					unsigned j;
					for (j = 0; j < i; j++) {
						a0dalloc(
						    ctl_stats.arenas[j].lstats);
						a0dalloc(
						    ctl_stats.arenas[j].hstats);
					}
					a0dalloc(ctl_stats.arenas);
					ctl_stats.arenas = NULL;
					ret = true;
					goto label_return;
				}
			}
		}
		ctl_stats.arenas[ctl_stats.narenas].initialized = true;

		ctl_epoch = 0;
		ctl_refresh();
		ctl_initialized = true;
	}

	ret = false;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}

static int
ctl_lookup(const char *name, ctl_node_t const **nodesp, size_t *mibp,
    size_t *depthp)
{
	int ret;
	const char *elm, *tdot, *dot;
	size_t elen, i, j;
	const ctl_named_node_t *node;

	elm = name;
	/* Equivalent to strchrnul(). */
	dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot : strchr(elm, '\0');
	elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	if (elen == 0) {
		ret = ENOENT;
		goto label_return;
	}
	node = super_root_node;
	for (i = 0; i < *depthp; i++) {
		assert(node);
		assert(node->nchildren > 0);
		if (ctl_named_node(node->children) != NULL) {
			const ctl_named_node_t *pnode = node;

			/* Children are named. */
			for (j = 0; j < node->nchildren; j++) {
				const ctl_named_node_t *child =
				    ctl_named_children(node, j);
				if (strlen(child->name) == elen &&
				    strncmp(elm, child->name, elen) == 0) {
					node = child;
					if (nodesp != NULL)
						nodesp[i] =
						    (const ctl_node_t *)node;
					mibp[i] = j;
					break;
				}
			}
			if (node == pnode) {
				ret = ENOENT;
				goto label_return;
			}
		} else {
			uintmax_t index;
			const ctl_indexed_node_t *inode;

			/* Children are indexed. */
			index = malloc_strtoumax(elm, NULL, 10);
			if (index == UINTMAX_MAX || index > SIZE_T_MAX) {
				ret = ENOENT;
				goto label_return;
			}

			inode = ctl_indexed_node(node->children);
			node = inode->index(mibp, *depthp, (size_t)index);
			if (node == NULL) {
				ret = ENOENT;
				goto label_return;
			}

			if (nodesp != NULL)
				nodesp[i] = (const ctl_node_t *)node;
			mibp[i] = (size_t)index;
		}

		if (node->ctl != NULL) {
			/* Terminal node. */
			if (*dot != '\0') {
				/*
				 * The name contains more elements than are
				 * in this path through the tree.
				 */
				ret = ENOENT;
				goto label_return;
			}
			/* Complete lookup successful. */
			*depthp = i + 1;
			break;
		}

		/* Update elm. */
		if (*dot == '\0') {
			/* No more elements. */
			ret = ENOENT;
			goto label_return;
		}
		elm = &dot[1];
		dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot :
		    strchr(elm, '\0');
		elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	}

	ret = 0;
label_return:
	return (ret);
}
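
/*
 * Lookup sketch (illustrative): for the name "arena.0.purge", ctl_lookup()
 * matches "arena" among root_node's named children, hands "0" to
 * arena_i_index() because arena_node is indexed, stores 0 in mibp[1], and
 * stops at "purge" since arena_i_purge_ctl is a terminal node, leaving
 * *depthp == 3.
 */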

int
ctl_byname(const char *name, void *oldp, size_t *oldlenp, void *newp,
    size_t newlen)
{
	int ret;
	size_t depth;
	ctl_node_t const *nodes[CTL_MAX_DEPTH];
	size_t mib[CTL_MAX_DEPTH];
	const ctl_named_node_t *node;

	if (!ctl_initialized && ctl_init()) {
		ret = EAGAIN;
		goto label_return;
	}

	depth = CTL_MAX_DEPTH;
	ret = ctl_lookup(name, nodes, mib, &depth);
	if (ret != 0)
		goto label_return;

	node = ctl_named_node(nodes[depth-1]);
	if (node != NULL && node->ctl)
		ret = node->ctl(mib, depth, oldp, oldlenp, newp, newlen);
	else {
		/* The name refers to a partial path through the ctl tree. */
		ret = ENOENT;
	}

label_return:
	return (ret);
}
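
/*
 * Usage sketch, assuming the public mallctl() wrapper, which forwards its
 * arguments here:
 *
 *	const char *v;
 *	size_t sz = sizeof(v);
 *	if (mallctl("version", &v, &sz, NULL, 0) == 0)
 *		printf("jemalloc %s\n", v);
 */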

int
ctl_nametomib(const char *name, size_t *mibp, size_t *miblenp)
{
	int ret;

	if (!ctl_initialized && ctl_init()) {
		ret = EAGAIN;
		goto label_return;
	}

	ret = ctl_lookup(name, NULL, mibp, miblenp);
label_return:
	return (ret);
}

int
ctl_bymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	const ctl_named_node_t *node;
	size_t i;

	if (!ctl_initialized && ctl_init()) {
		ret = EAGAIN;
		goto label_return;
	}

	/* Iterate down the tree. */
	node = super_root_node;
	for (i = 0; i < miblen; i++) {
		assert(node);
		assert(node->nchildren > 0);
		if (ctl_named_node(node->children) != NULL) {
			/* Children are named. */
			if (node->nchildren <= mib[i]) {
				ret = ENOENT;
				goto label_return;
			}
			node = ctl_named_children(node, mib[i]);
		} else {
			const ctl_indexed_node_t *inode;

			/* Indexed element. */
			inode = ctl_indexed_node(node->children);
			node = inode->index(mib, miblen, mib[i]);
			if (node == NULL) {
				ret = ENOENT;
				goto label_return;
			}
		}
	}

	/* Call the ctl function. */
	if (node && node->ctl)
		ret = node->ctl(mib, miblen, oldp, oldlenp, newp, newlen);
	else {
		/* Partial MIB. */
		ret = ENOENT;
	}

label_return:
	return (ret);
}
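
/*
 * MIB usage sketch, assuming the public mallctlnametomib()/mallctlbymib()
 * wrappers; translating the name once amortizes the string lookup across
 * repeated queries (nbins as previously read from "arenas.nbins"):
 *
 *	unsigned i;
 *	size_t mib[4], miblen = sizeof(mib) / sizeof(mib[0]);
 *	mallctlnametomib("arenas.bin.0.size", mib, &miblen);
 *	for (i = 0; i < nbins; i++) {
 *		size_t bin_size, sz = sizeof(bin_size);
 *		mib[2] = i;
 *		mallctlbymib(mib, miblen, &bin_size, &sz, NULL, 0);
 *	}
 */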

bool
ctl_boot(void)
{

	if (malloc_mutex_init(&ctl_mtx))
		return (true);

	ctl_initialized = false;

	return (false);
}

void
ctl_prefork(void)
{

	malloc_mutex_prefork(&ctl_mtx);
}

void
ctl_postfork_parent(void)
{

	malloc_mutex_postfork_parent(&ctl_mtx);
}

void
ctl_postfork_child(void)
{

	malloc_mutex_postfork_child(&ctl_mtx);
}

/******************************************************************************/
/* *_ctl() functions. */

#define	READONLY()	do {						\
	if (newp != NULL || newlen != 0) {				\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)

#define	WRITEONLY()	do {						\
	if (oldp != NULL || oldlenp != NULL) {				\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)

#define	READ_XOR_WRITE()	do {					\
	if ((oldp != NULL && oldlenp != NULL) && (newp != NULL ||	\
	    newlen != 0)) {						\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)

#define	READ(v, t)	do {						\
	if (oldp != NULL && oldlenp != NULL) {				\
		if (*oldlenp != sizeof(t)) {				\
			size_t	copylen = (sizeof(t) <= *oldlenp)	\
			    ? sizeof(t) : *oldlenp;			\
			memcpy(oldp, (void *)&(v), copylen);		\
			ret = EINVAL;					\
			goto label_return;				\
		}							\
		*(t *)oldp = (v);					\
	}								\
} while (0)

#define	WRITE(v, t)	do {						\
	if (newp != NULL) {						\
		if (newlen != sizeof(t)) {				\
			ret = EINVAL;					\
			goto label_return;				\
		}							\
		(v) = *(t *)newp;					\
	}								\
} while (0)
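
/*
 * Semantics sketch: within a *_ctl() body,
 *
 *	READ(oldval, t);	copies oldval out through oldp/oldlenp,
 *				failing with EINVAL on a size mismatch;
 *	WRITE(newval, t);	copies a new value in from newp/newlen.
 *
 * A read-write control therefore typically does WRITE() into a local,
 * applies the change, then READ()s the previous value back out.
 */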

/*
 * There's a lot of code duplication in the following macros due to limitations
 * in how nested cpp macros are expanded.
 */
#define	CTL_RO_CLGEN(c, l, n, v, t)					\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	if (l)								\
		malloc_mutex_lock(&ctl_mtx);				\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	if (l)								\
		malloc_mutex_unlock(&ctl_mtx);				\
	return (ret);							\
}

#define	CTL_RO_CGEN(c, n, v, t)						\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	malloc_mutex_lock(&ctl_mtx);					\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	malloc_mutex_unlock(&ctl_mtx);					\
	return (ret);							\
}

#define	CTL_RO_GEN(n, v, t)						\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	malloc_mutex_lock(&ctl_mtx);					\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	malloc_mutex_unlock(&ctl_mtx);					\
	return (ret);							\
}

/*
 * ctl_mtx is not acquired, under the assumption that no pertinent data will
 * mutate during the call.
 */
#define	CTL_RO_NL_CGEN(c, n, v, t)					\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}

#define	CTL_RO_NL_GEN(n, v, t)						\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}

#define	CTL_TSD_RO_NL_CGEN(c, n, m, t)					\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
	tsd_t *tsd;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	READONLY();							\
	tsd = tsd_fetch();						\
	oldval = (m(tsd));						\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}

#define	CTL_RO_BOOL_CONFIG_GEN(n)					\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	bool oldval;							\
									\
	READONLY();							\
	oldval = n;							\
	READ(oldval, bool);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
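
/*
 * Expansion sketch: CTL_RO_NL_GEN(opt_abort, opt_abort, bool) defines
 *
 *	static int
 *	opt_abort_ctl(const size_t *mib, size_t miblen, void *oldp,
 *	    size_t *oldlenp, void *newp, size_t newlen)
 *
 * which rejects writes via READONLY(), snapshots opt_abort into a local
 * bool, and READ()s it out; the _NL variants skip ctl_mtx entirely.
 */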

/******************************************************************************/

CTL_RO_NL_GEN(version, JEMALLOC_VERSION, const char *)

static int
epoch_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	UNUSED uint64_t newval;

	malloc_mutex_lock(&ctl_mtx);
	WRITE(newval, uint64_t);
	if (newp != NULL)
		ctl_refresh();
	READ(ctl_epoch, uint64_t);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
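
/*
 * Refresh sketch: statistics are cached and only recomputed when the epoch
 * is advanced, so consumers conventionally write any value to "epoch"
 * before reading stats:
 *
 *	uint64_t epoch = 1;
 *	size_t sz = sizeof(epoch);
 *	mallctl("epoch", &epoch, &sz, &epoch, sz);
 */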

/******************************************************************************/

CTL_RO_BOOL_CONFIG_GEN(config_cache_oblivious)
CTL_RO_BOOL_CONFIG_GEN(config_debug)
CTL_RO_BOOL_CONFIG_GEN(config_fill)
CTL_RO_BOOL_CONFIG_GEN(config_lazy_lock)
CTL_RO_BOOL_CONFIG_GEN(config_munmap)
CTL_RO_BOOL_CONFIG_GEN(config_prof)
CTL_RO_BOOL_CONFIG_GEN(config_prof_libgcc)
CTL_RO_BOOL_CONFIG_GEN(config_prof_libunwind)
CTL_RO_BOOL_CONFIG_GEN(config_stats)
CTL_RO_BOOL_CONFIG_GEN(config_tcache)
CTL_RO_BOOL_CONFIG_GEN(config_tls)
CTL_RO_BOOL_CONFIG_GEN(config_utrace)
CTL_RO_BOOL_CONFIG_GEN(config_valgrind)
CTL_RO_BOOL_CONFIG_GEN(config_xmalloc)

/******************************************************************************/

CTL_RO_NL_GEN(opt_abort, opt_abort, bool)
CTL_RO_NL_GEN(opt_dss, opt_dss, const char *)
CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
CTL_RO_NL_GEN(opt_narenas, opt_narenas, size_t)
CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool)
CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, const char *)
CTL_RO_NL_CGEN(config_fill, opt_quarantine, opt_quarantine, size_t)
CTL_RO_NL_CGEN(config_fill, opt_redzone, opt_redzone, bool)
CTL_RO_NL_CGEN(config_fill, opt_zero, opt_zero, bool)
CTL_RO_NL_CGEN(config_utrace, opt_utrace, opt_utrace, bool)
CTL_RO_NL_CGEN(config_xmalloc, opt_xmalloc, opt_xmalloc, bool)
CTL_RO_NL_CGEN(config_tcache, opt_tcache, opt_tcache, bool)
CTL_RO_NL_CGEN(config_tcache, opt_lg_tcache_max, opt_lg_tcache_max, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof, opt_prof, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_prefix, opt_prof_prefix, const char *)
CTL_RO_NL_CGEN(config_prof, opt_prof_active, opt_prof_active, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_thread_active_init,
    opt_prof_thread_active_init, bool)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_sample, opt_lg_prof_sample, size_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_accum, opt_prof_accum, bool)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_interval, opt_lg_prof_interval, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_gdump, opt_prof_gdump, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_final, opt_prof_final, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_leak, opt_prof_leak, bool)

/******************************************************************************/

static int
thread_arena_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	tsd_t *tsd;
	arena_t *oldarena;
	unsigned newind, oldind;

	tsd = tsd_fetch();
	oldarena = arena_choose(tsd, NULL);
	if (oldarena == NULL)
		return (EAGAIN);

	malloc_mutex_lock(&ctl_mtx);
	newind = oldind = oldarena->ind;
	WRITE(newind, unsigned);
	READ(oldind, unsigned);
	if (newind != oldind) {
		arena_t *newarena;

		if (newind >= ctl_stats.narenas) {
			/* New arena index is out of range. */
			ret = EFAULT;
			goto label_return;
		}

		/* Initialize arena if necessary. */
		newarena = arena_get(tsd, newind, true, true);
		if (newarena == NULL) {
			ret = EAGAIN;
			goto label_return;
		}
		/* Set new arena/tcache associations. */
		arena_migrate(tsd, oldind, newind);
		if (config_tcache) {
			tcache_t *tcache = tsd_tcache_get(tsd);
			if (tcache != NULL) {
				tcache_arena_reassociate(tcache, oldarena,
				    newarena);
			}
		}
	}

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}

CTL_TSD_RO_NL_CGEN(config_stats, thread_allocated, tsd_thread_allocated_get,
    uint64_t)
CTL_TSD_RO_NL_CGEN(config_stats, thread_allocatedp, tsd_thread_allocatedp_get,
    uint64_t *)
CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocated, tsd_thread_deallocated_get,
    uint64_t)
CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocatedp,
    tsd_thread_deallocatedp_get, uint64_t *)

static int
thread_tcache_enabled_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_tcache)
		return (ENOENT);

	oldval = tcache_enabled_get();
	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		tcache_enabled_set(*(bool *)newp);
	}
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}

static int
thread_tcache_flush_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	if (!config_tcache)
		return (ENOENT);

	READONLY();
	WRITEONLY();

	tcache_flush();

	ret = 0;
label_return:
	return (ret);
}

static int
thread_prof_name_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	if (!config_prof)
		return (ENOENT);

	READ_XOR_WRITE();

	if (newp != NULL) {
		tsd_t *tsd;

		if (newlen != sizeof(const char *)) {
			ret = EINVAL;
			goto label_return;
		}

		tsd = tsd_fetch();

		if ((ret = prof_thread_name_set(tsd, *(const char **)newp)) !=
		    0)
			goto label_return;
	} else {
		const char *oldname = prof_thread_name_get();
		READ(oldname, const char *);
	}

	ret = 0;
label_return:
	return (ret);
}

static int
thread_prof_active_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_prof)
		return (ENOENT);

	oldval = prof_thread_active_get();
	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		if (prof_thread_active_set(*(bool *)newp)) {
			ret = EAGAIN;
			goto label_return;
		}
	}
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}

/******************************************************************************/

static int
tcache_create_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	tsd_t *tsd;
	unsigned tcache_ind;

	if (!config_tcache)
		return (ENOENT);

	tsd = tsd_fetch();

	malloc_mutex_lock(&ctl_mtx);
	READONLY();
	if (tcaches_create(tsd, &tcache_ind)) {
		ret = EFAULT;
		goto label_return;
	}
	READ(tcache_ind, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}

static int
tcache_flush_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	tsd_t *tsd;
	unsigned tcache_ind;

	if (!config_tcache)
		return (ENOENT);

	tsd = tsd_fetch();

	WRITEONLY();
	tcache_ind = UINT_MAX;
	WRITE(tcache_ind, unsigned);
	if (tcache_ind == UINT_MAX) {
		ret = EFAULT;
		goto label_return;
	}
	tcaches_flush(tsd, tcache_ind);

	ret = 0;
label_return:
	return (ret);
}

static int
tcache_destroy_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	tsd_t *tsd;
	unsigned tcache_ind;

	if (!config_tcache)
		return (ENOENT);

	tsd = tsd_fetch();

	WRITEONLY();
	tcache_ind = UINT_MAX;
	WRITE(tcache_ind, unsigned);
	if (tcache_ind == UINT_MAX) {
		ret = EFAULT;
		goto label_return;
	}
	tcaches_destroy(tsd, tcache_ind);

	ret = 0;
label_return:
	return (ret);
}
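
/*
 * Usage sketch for explicit caches, assuming the public wrappers and
 * MALLOCX_TCACHE() from <jemalloc/jemalloc.h>:
 *
 *	unsigned ind;
 *	size_t sz = sizeof(ind);
 *	mallctl("tcache.create", &ind, &sz, NULL, 0);
 *	void *p = mallocx(42, MALLOCX_TCACHE(ind));
 *	dallocx(p, MALLOCX_TCACHE(ind));
 *	mallctl("tcache.destroy", NULL, NULL, &ind, sizeof(ind));
 */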

/******************************************************************************/

/* ctl_mtx must be held during execution of this function. */
static void
arena_purge(unsigned arena_ind)
{
	tsd_t *tsd;
	unsigned i;
	bool refreshed;
	VARIABLE_ARRAY(arena_t *, tarenas, ctl_stats.narenas);

	tsd = tsd_fetch();
	for (i = 0, refreshed = false; i < ctl_stats.narenas; i++) {
		tarenas[i] = arena_get(tsd, i, false, false);
		if (tarenas[i] == NULL && !refreshed) {
			tarenas[i] = arena_get(tsd, i, false, true);
			refreshed = true;
		}
	}

	if (arena_ind == ctl_stats.narenas) {
		unsigned i;
		for (i = 0; i < ctl_stats.narenas; i++) {
			if (tarenas[i] != NULL)
				arena_purge_all(tarenas[i]);
		}
	} else {
		assert(arena_ind < ctl_stats.narenas);
		if (tarenas[arena_ind] != NULL)
			arena_purge_all(tarenas[arena_ind]);
	}
}

static int
arena_i_purge_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;

	READONLY();
	WRITEONLY();
	malloc_mutex_lock(&ctl_mtx);
	arena_purge(mib[1]);
	malloc_mutex_unlock(&ctl_mtx);

	ret = 0;
label_return:
	return (ret);
}
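
/*
 * Purge sketch: index ctl_stats.narenas (one past the last arena, as
 * reported by "arenas.narenas") addresses all arenas at once:
 *
 *	unsigned narenas;
 *	size_t sz = sizeof(narenas);
 *	char cmd[64];
 *	mallctl("arenas.narenas", &narenas, &sz, NULL, 0);
 *	snprintf(cmd, sizeof(cmd), "arena.%u.purge", narenas);
 *	mallctl(cmd, NULL, NULL, NULL, 0);
 */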

static int
arena_i_dss_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	const char *dss = NULL;
	unsigned arena_ind = mib[1];
	dss_prec_t dss_prec_old = dss_prec_limit;
	dss_prec_t dss_prec = dss_prec_limit;

	malloc_mutex_lock(&ctl_mtx);
	WRITE(dss, const char *);
	if (dss != NULL) {
		int i;
		bool match = false;

		for (i = 0; i < dss_prec_limit; i++) {
			if (strcmp(dss_prec_names[i], dss) == 0) {
				dss_prec = i;
				match = true;
				break;
			}
		}

		if (!match) {
			ret = EINVAL;
			goto label_return;
		}
	}

	if (arena_ind < ctl_stats.narenas) {
		arena_t *arena = arena_get(tsd_fetch(), arena_ind, false, true);
		if (arena == NULL || (dss_prec != dss_prec_limit &&
		    arena_dss_prec_set(arena, dss_prec))) {
			ret = EFAULT;
			goto label_return;
		}
		dss_prec_old = arena_dss_prec_get(arena);
	} else {
		if (dss_prec != dss_prec_limit &&
		    chunk_dss_prec_set(dss_prec)) {
			ret = EFAULT;
			goto label_return;
		}
		dss_prec_old = chunk_dss_prec_get();
	}

	dss = dss_prec_names[dss_prec_old];
	READ(dss, const char *);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
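
/*
 * dss sketch: reads return the current precedence as one of the
 * dss_prec_names strings; writing one of those strings changes it for the
 * given arena, or process-wide when the index equals "arenas.narenas":
 *
 *	const char *dss = "primary";
 *	mallctl("arena.0.dss", NULL, NULL, &dss, sizeof(dss));
 */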

static int
arena_i_lg_dirty_mult_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned arena_ind = mib[1];
	arena_t *arena;

	arena = arena_get(tsd_fetch(), arena_ind, false, true);
	if (arena == NULL) {
		ret = EFAULT;
		goto label_return;
	}

	if (oldp != NULL && oldlenp != NULL) {
		ssize_t oldval = arena_lg_dirty_mult_get(arena);
		READ(oldval, ssize_t);
	}
	if (newp != NULL) {
		if (newlen != sizeof(ssize_t)) {
			ret = EINVAL;
			goto label_return;
		}
		if (arena_lg_dirty_mult_set(arena, *(ssize_t *)newp)) {
			ret = EFAULT;
			goto label_return;
		}
	}

	ret = 0;
label_return:
	return (ret);
}

static int
arena_i_chunk_hooks_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned arena_ind = mib[1];
	arena_t *arena;

	malloc_mutex_lock(&ctl_mtx);
	if (arena_ind < narenas_total_get() && (arena =
	    arena_get(tsd_fetch(), arena_ind, false, true)) != NULL) {
		if (newp != NULL) {
			chunk_hooks_t old_chunk_hooks, new_chunk_hooks;
			WRITE(new_chunk_hooks, chunk_hooks_t);
			old_chunk_hooks = chunk_hooks_set(arena,
			    &new_chunk_hooks);
			READ(old_chunk_hooks, chunk_hooks_t);
		} else {
			chunk_hooks_t old_chunk_hooks = chunk_hooks_get(arena);
			READ(old_chunk_hooks, chunk_hooks_t);
		}
	} else {
		ret = EFAULT;
		goto label_return;
	}
	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}

static const ctl_named_node_t *
arena_i_index(const size_t *mib, size_t miblen, size_t i)
{
	const ctl_named_node_t * ret;

	malloc_mutex_lock(&ctl_mtx);
	if (i > ctl_stats.narenas) {
		ret = NULL;
		goto label_return;
	}

	ret = super_arena_i_node;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}

/******************************************************************************/

static int
arenas_narenas_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned narenas;

	malloc_mutex_lock(&ctl_mtx);
	READONLY();
	if (*oldlenp != sizeof(unsigned)) {
		ret = EINVAL;
		goto label_return;
	}
	narenas = ctl_stats.narenas;
	READ(narenas, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}

static int
arenas_initialized_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned nread, i;

	malloc_mutex_lock(&ctl_mtx);
	READONLY();
	if (*oldlenp != ctl_stats.narenas * sizeof(bool)) {
		ret = EINVAL;
		nread = (*oldlenp < ctl_stats.narenas * sizeof(bool))
		    ? (*oldlenp / sizeof(bool)) : ctl_stats.narenas;
	} else {
		ret = 0;
		nread = ctl_stats.narenas;
	}

	for (i = 0; i < nread; i++)
		((bool *)oldp)[i] = ctl_stats.arenas[i].initialized;

label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}

static int
arenas_lg_dirty_mult_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	if (oldp != NULL && oldlenp != NULL) {
		ssize_t oldval = arena_lg_dirty_mult_default_get();
1783 		READ(oldval, ssize_t);
1784 	}
1785 	if (newp != NULL) {
1786 		if (newlen != sizeof(ssize_t)) {
1787 			ret = EINVAL;
1788 			goto label_return;
1789 		}
1790 		if (arena_lg_dirty_mult_default_set(*(ssize_t *)newp)) {
1791 			ret = EFAULT;
1792 			goto label_return;
1793 		}
1794 	}
1795 
1796 	ret = 0;
1797 label_return:
1798 	return (ret);
1799 }
1800 
1801 CTL_RO_NL_GEN(arenas_quantum, QUANTUM, size_t)
1802 CTL_RO_NL_GEN(arenas_page, PAGE, size_t)
1803 CTL_RO_NL_CGEN(config_tcache, arenas_tcache_max, tcache_maxclass, size_t)
1804 CTL_RO_NL_GEN(arenas_nbins, NBINS, unsigned)
1805 CTL_RO_NL_CGEN(config_tcache, arenas_nhbins, nhbins, unsigned)
1806 CTL_RO_NL_GEN(arenas_bin_i_size, arena_bin_info[mib[2]].reg_size, size_t)
1807 CTL_RO_NL_GEN(arenas_bin_i_nregs, arena_bin_info[mib[2]].nregs, uint32_t)
1808 CTL_RO_NL_GEN(arenas_bin_i_run_size, arena_bin_info[mib[2]].run_size, size_t)
1809 static const ctl_named_node_t *
1810 arenas_bin_i_index(const size_t *mib, size_t miblen, size_t i)
1811 {
1812 
1813 	if (i > NBINS)
1814 		return (NULL);
1815 	return (super_arenas_bin_i_node);
1816 }
1817 
1818 CTL_RO_NL_GEN(arenas_nlruns, nlclasses, unsigned)
1819 CTL_RO_NL_GEN(arenas_lrun_i_size, index2size(NBINS+mib[2]), size_t)
1820 static const ctl_named_node_t *
1821 arenas_lrun_i_index(const size_t *mib, size_t miblen, size_t i)
1822 {
1823 
1824 	if (i > nlclasses)
1825 		return (NULL);
1826 	return (super_arenas_lrun_i_node);
1827 }
1828 
1829 CTL_RO_NL_GEN(arenas_nhchunks, nhclasses, unsigned)
1830 CTL_RO_NL_GEN(arenas_hchunk_i_size, index2size(NBINS+nlclasses+mib[2]), size_t)
1831 static const ctl_named_node_t *
1832 arenas_hchunk_i_index(const size_t *mib, size_t miblen, size_t i)
1833 {
1834 
1835 	if (i > nhclasses)
1836 		return (NULL);
1837 	return (super_arenas_hchunk_i_node);
1838 }

static int
arenas_extend_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	unsigned narenas;

	malloc_mutex_lock(&ctl_mtx);
	READONLY();
	if (ctl_grow()) {
		ret = EAGAIN;
		goto label_return;
	}
	narenas = ctl_stats.narenas - 1;
	READ(narenas, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
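/*
 * Usage sketch (hypothetical caller): creating a fresh arena and binding
 * the calling thread to it:
 *
 *	unsigned arena_ind;
 *	size_t sz = sizeof(arena_ind);
 *	mallctl("arenas.extend", &arena_ind, &sz, NULL, 0);
 *	mallctl("thread.arena", NULL, NULL, &arena_ind, sizeof(arena_ind));
 */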

/******************************************************************************/

static int
prof_thread_active_init_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_prof)
		return (ENOENT);

	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		oldval = prof_thread_active_init_set(*(bool *)newp);
	} else
		oldval = prof_thread_active_init_get();
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}

static int
prof_active_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_prof)
		return (ENOENT);

	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		oldval = prof_active_set(*(bool *)newp);
	} else
		oldval = prof_active_get();
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
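/*
 * Usage sketch (hypothetical caller): the read/write bool pattern shared
 * by the prof.* toggles; because the setter returns the prior value,
 * supplying both oldp and newp exchanges the flag in a single call:
 *
 *	bool active = true, was_active;
 *	size_t sz = sizeof(bool);
 *	mallctl("prof.active", &was_active, &sz, &active, sizeof(bool));
 */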

static int
prof_dump_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	const char *filename = NULL;

	if (!config_prof)
		return (ENOENT);

	WRITEONLY();
	WRITE(filename, const char *);

	if (prof_mdump(filename)) {
		ret = EFAULT;
		goto label_return;
	}

	ret = 0;
label_return:
	return (ret);
}
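/*
 * Usage sketch (hypothetical caller): dumping a profile to an explicit
 * path; with newp == NULL the filename stays NULL and prof_mdump() falls
 * back to a name derived from opt.prof_prefix:
 *
 *	const char *filename = "./prof.heap";
 *	mallctl("prof.dump", NULL, NULL, &filename, sizeof(const char *));
 */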

static int
prof_gdump_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_prof)
		return (ENOENT);

	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		oldval = prof_gdump_set(*(bool *)newp);
	} else
		oldval = prof_gdump_get();
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}

static int
prof_reset_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	size_t lg_sample = lg_prof_sample;
	tsd_t *tsd;

	if (!config_prof)
		return (ENOENT);

	WRITEONLY();
	WRITE(lg_sample, size_t);
	if (lg_sample >= (sizeof(uint64_t) << 3))
		lg_sample = (sizeof(uint64_t) << 3) - 1;

	tsd = tsd_fetch();

	prof_reset(tsd, lg_sample);

	ret = 0;
label_return:
	return (ret);
}
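/*
 * Usage sketch (hypothetical caller): resetting all profile counters
 * while switching to a 2^19-byte average sample interval; values of 64
 * or more are clamped to 63 above:
 *
 *	size_t lg_sample = 19;
 *	mallctl("prof.reset", NULL, NULL, &lg_sample, sizeof(size_t));
 */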

CTL_RO_NL_CGEN(config_prof, prof_interval, prof_interval, uint64_t)
CTL_RO_NL_CGEN(config_prof, lg_prof_sample, lg_prof_sample, size_t)

/******************************************************************************/

CTL_RO_CGEN(config_stats, stats_cactive, &stats_cactive, size_t *)
CTL_RO_CGEN(config_stats, stats_allocated, ctl_stats.allocated, size_t)
CTL_RO_CGEN(config_stats, stats_active, ctl_stats.active, size_t)
CTL_RO_CGEN(config_stats, stats_metadata, ctl_stats.metadata, size_t)
CTL_RO_CGEN(config_stats, stats_resident, ctl_stats.resident, size_t)
CTL_RO_CGEN(config_stats, stats_mapped, ctl_stats.mapped, size_t)
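/*
 * Usage sketch (hypothetical caller): the stats.* values above are
 * snapshots refreshed by ctl_refresh(); readers bump "epoch" first so
 * the snapshot is current:
 *
 *	uint64_t epoch = 1;
 *	size_t allocated, sz = sizeof(size_t);
 *	mallctl("epoch", NULL, NULL, &epoch, sizeof(epoch));
 *	mallctl("stats.allocated", &allocated, &sz, NULL, 0);
 */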

CTL_RO_GEN(stats_arenas_i_dss, ctl_stats.arenas[mib[2]].dss, const char *)
CTL_RO_GEN(stats_arenas_i_lg_dirty_mult, ctl_stats.arenas[mib[2]].lg_dirty_mult,
    ssize_t)
CTL_RO_GEN(stats_arenas_i_nthreads, ctl_stats.arenas[mib[2]].nthreads, unsigned)
CTL_RO_GEN(stats_arenas_i_pactive, ctl_stats.arenas[mib[2]].pactive, size_t)
CTL_RO_GEN(stats_arenas_i_pdirty, ctl_stats.arenas[mib[2]].pdirty, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_mapped,
    ctl_stats.arenas[mib[2]].astats.mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_npurge,
    ctl_stats.arenas[mib[2]].astats.npurge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_nmadvise,
    ctl_stats.arenas[mib[2]].astats.nmadvise, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_purged,
    ctl_stats.arenas[mib[2]].astats.purged, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_mapped,
    ctl_stats.arenas[mib[2]].astats.metadata_mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_allocated,
    ctl_stats.arenas[mib[2]].astats.metadata_allocated, size_t)

CTL_RO_CGEN(config_stats, stats_arenas_i_small_allocated,
    ctl_stats.arenas[mib[2]].allocated_small, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nmalloc,
    ctl_stats.arenas[mib[2]].nmalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_ndalloc,
    ctl_stats.arenas[mib[2]].ndalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nrequests,
    ctl_stats.arenas[mib[2]].nrequests_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_allocated,
    ctl_stats.arenas[mib[2]].astats.allocated_large, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nmalloc,
    ctl_stats.arenas[mib[2]].astats.nmalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_ndalloc,
    ctl_stats.arenas[mib[2]].astats.ndalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nrequests,
    ctl_stats.arenas[mib[2]].astats.nrequests_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_allocated,
    ctl_stats.arenas[mib[2]].astats.allocated_huge, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_nmalloc,
    ctl_stats.arenas[mib[2]].astats.nmalloc_huge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_ndalloc,
    ctl_stats.arenas[mib[2]].astats.ndalloc_huge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_nrequests,
    /* Intentional: nrequests == nmalloc for huge objects. */
    ctl_stats.arenas[mib[2]].astats.nmalloc_huge, uint64_t)

CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nmalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_ndalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nrequests,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curregs,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].curregs, size_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nfills,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nfills, uint64_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nflushes,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nflushes, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nreruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].reruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].curruns, size_t)

static const ctl_named_node_t *
stats_arenas_i_bins_j_index(const size_t *mib, size_t miblen, size_t j)
{

	/* Valid bin indices are [0, NBINS). */
	if (j >= NBINS)
		return (NULL);
	return (super_stats_arenas_i_bins_j_node);
}
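/*
 * Usage sketch (hypothetical caller): repeated per-bin queries are
 * cheaper through a precomputed MIB, where slots 2 and 4 hold the arena
 * and bin indices; arena_ind and bin_ind are assumed to be in range:
 *
 *	size_t mib[6], miblen = 6;
 *	uint64_t nmalloc;
 *	size_t sz = sizeof(uint64_t);
 *	mallctlnametomib("stats.arenas.0.bins.0.nmalloc", mib, &miblen);
 *	mib[2] = arena_ind;
 *	mib[4] = bin_ind;
 *	mallctlbymib(mib, miblen, &nmalloc, &sz, NULL, 0);
 */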

CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nmalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_ndalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nrequests,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_curruns,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].curruns, size_t)

static const ctl_named_node_t *
stats_arenas_i_lruns_j_index(const size_t *mib, size_t miblen, size_t j)
{

	/* Valid large run indices are [0, nlclasses). */
	if (j >= nlclasses)
		return (NULL);
	return (super_stats_arenas_i_lruns_j_node);
}

CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_nmalloc,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_ndalloc,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_nrequests,
    /* Intentional: nrequests == nmalloc for huge objects. */
    ctl_stats.arenas[mib[2]].hstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_curhchunks,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].curhchunks, size_t)

static const ctl_named_node_t *
stats_arenas_i_hchunks_j_index(const size_t *mib, size_t miblen, size_t j)
{

	/* Valid huge chunk indices are [0, nhclasses). */
	if (j >= nhclasses)
		return (NULL);
	return (super_stats_arenas_i_hchunks_j_node);
}

static const ctl_named_node_t *
stats_arenas_i_index(const size_t *mib, size_t miblen, size_t i)
{
	const ctl_named_node_t *ret;

	malloc_mutex_lock(&ctl_mtx);
	if (i > ctl_stats.narenas || !ctl_stats.arenas[i].initialized) {
		ret = NULL;
		goto label_return;
	}

	ret = super_stats_arenas_i_node;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
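/*
 * Unlike the fixed-bound index functions above, i == ctl_stats.narenas is
 * accepted here: ctl_stats.arenas is presumed to hold narenas + 1 elements,
 * with the final slot carrying the stats merged across all arenas (see
 * ctl_arena_stats_smerge()).
 */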