• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 #define	JEMALLOC_CTL_C_
2 #include "jemalloc/internal/jemalloc_internal.h"
3 
/******************************************************************************/
/* Data. */

/*
 * ctl_mtx protects the following:
 * - ctl_stats.*
 */
static malloc_mutex_t	ctl_mtx;
/* Set to true by ctl_init() once the stats arrays are allocated. */
static bool		ctl_initialized;
/* Incremented by ctl_refresh(); distinguishes stale ctl_stats snapshots. */
static uint64_t		ctl_epoch;
/* Per-arena stats plus one trailing summary element (see ctl_init()). */
static ctl_stats_t	ctl_stats;
16 /******************************************************************************/
17 /* Helpers for named and indexed nodes. */
18 
19 JEMALLOC_INLINE_C const ctl_named_node_t *
ctl_named_node(const ctl_node_t * node)20 ctl_named_node(const ctl_node_t *node)
21 {
22 
23 	return ((node->named) ? (const ctl_named_node_t *)node : NULL);
24 }
25 
26 JEMALLOC_INLINE_C const ctl_named_node_t *
ctl_named_children(const ctl_named_node_t * node,size_t index)27 ctl_named_children(const ctl_named_node_t *node, size_t index)
28 {
29 	const ctl_named_node_t *children = ctl_named_node(node->children);
30 
31 	return (children ? &children[index] : NULL);
32 }
33 
34 JEMALLOC_INLINE_C const ctl_indexed_node_t *
ctl_indexed_node(const ctl_node_t * node)35 ctl_indexed_node(const ctl_node_t *node)
36 {
37 
38 	return (!node->named ? (const ctl_indexed_node_t *)node : NULL);
39 }
40 
/******************************************************************************/
/* Function prototypes for non-inline static functions. */

/* Prototype the read/write handler for a terminal (leaf) mallctl node. */
#define	CTL_PROTO(n)							\
static int	n##_ctl(const size_t *mib, size_t miblen, void *oldp,	\
    size_t *oldlenp, void *newp, size_t newlen);

/* Prototype the index callback for an indexed (numeric-component) node. */
#define	INDEX_PROTO(n)							\
static const ctl_named_node_t	*n##_index(const size_t *mib,		\
    size_t miblen, size_t i);

static bool	ctl_arena_init(ctl_arena_stats_t *astats);
static void	ctl_arena_clear(ctl_arena_stats_t *astats);
static void	ctl_arena_stats_amerge(ctl_arena_stats_t *cstats,
    arena_t *arena);
static void	ctl_arena_stats_smerge(ctl_arena_stats_t *sstats,
    ctl_arena_stats_t *astats);
static void	ctl_arena_refresh(arena_t *arena, unsigned i);
static bool	ctl_grow(void);
static void	ctl_refresh(void);
static bool	ctl_init(void);
static int	ctl_lookup(const char *name, ctl_node_t const **nodesp,
    size_t *mibp, size_t *depthp);
64 
/*
 * Declare every leaf handler and index callback in the mallctl tree; the
 * order mirrors the tree tables defined below.
 */
CTL_PROTO(version)
CTL_PROTO(epoch)
CTL_PROTO(thread_tcache_enabled)
CTL_PROTO(thread_tcache_flush)
CTL_PROTO(thread_prof_name)
CTL_PROTO(thread_prof_active)
CTL_PROTO(thread_arena)
CTL_PROTO(thread_allocated)
CTL_PROTO(thread_allocatedp)
CTL_PROTO(thread_deallocated)
CTL_PROTO(thread_deallocatedp)
CTL_PROTO(config_cache_oblivious)
CTL_PROTO(config_debug)
CTL_PROTO(config_fill)
CTL_PROTO(config_lazy_lock)
CTL_PROTO(config_malloc_conf)
CTL_PROTO(config_munmap)
CTL_PROTO(config_prof)
CTL_PROTO(config_prof_libgcc)
CTL_PROTO(config_prof_libunwind)
CTL_PROTO(config_stats)
CTL_PROTO(config_tcache)
CTL_PROTO(config_tls)
CTL_PROTO(config_utrace)
CTL_PROTO(config_valgrind)
CTL_PROTO(config_xmalloc)
CTL_PROTO(opt_abort)
CTL_PROTO(opt_dss)
CTL_PROTO(opt_lg_chunk)
CTL_PROTO(opt_narenas)
CTL_PROTO(opt_purge)
CTL_PROTO(opt_lg_dirty_mult)
CTL_PROTO(opt_decay_time)
CTL_PROTO(opt_stats_print)
CTL_PROTO(opt_junk)
CTL_PROTO(opt_zero)
CTL_PROTO(opt_quarantine)
CTL_PROTO(opt_redzone)
CTL_PROTO(opt_utrace)
CTL_PROTO(opt_xmalloc)
CTL_PROTO(opt_tcache)
CTL_PROTO(opt_lg_tcache_max)
CTL_PROTO(opt_prof)
CTL_PROTO(opt_prof_prefix)
CTL_PROTO(opt_prof_active)
CTL_PROTO(opt_prof_thread_active_init)
CTL_PROTO(opt_lg_prof_sample)
CTL_PROTO(opt_lg_prof_interval)
CTL_PROTO(opt_prof_gdump)
CTL_PROTO(opt_prof_final)
CTL_PROTO(opt_prof_leak)
CTL_PROTO(opt_prof_accum)
CTL_PROTO(tcache_create)
CTL_PROTO(tcache_flush)
CTL_PROTO(tcache_destroy)
/* Shared helper for arena.<i>.purge and the legacy whole-arena purge path. */
static void	arena_i_purge(unsigned arena_ind, bool all);
CTL_PROTO(arena_i_purge)
CTL_PROTO(arena_i_decay)
CTL_PROTO(arena_i_dss)
CTL_PROTO(arena_i_lg_dirty_mult)
CTL_PROTO(arena_i_decay_time)
CTL_PROTO(arena_i_chunk_hooks)
INDEX_PROTO(arena_i)
CTL_PROTO(arenas_bin_i_size)
CTL_PROTO(arenas_bin_i_nregs)
CTL_PROTO(arenas_bin_i_run_size)
INDEX_PROTO(arenas_bin_i)
CTL_PROTO(arenas_lrun_i_size)
INDEX_PROTO(arenas_lrun_i)
CTL_PROTO(arenas_hchunk_i_size)
INDEX_PROTO(arenas_hchunk_i)
CTL_PROTO(arenas_narenas)
CTL_PROTO(arenas_initialized)
CTL_PROTO(arenas_lg_dirty_mult)
CTL_PROTO(arenas_decay_time)
CTL_PROTO(arenas_quantum)
CTL_PROTO(arenas_page)
CTL_PROTO(arenas_tcache_max)
CTL_PROTO(arenas_nbins)
CTL_PROTO(arenas_nhbins)
CTL_PROTO(arenas_nlruns)
CTL_PROTO(arenas_nhchunks)
CTL_PROTO(arenas_extend)
CTL_PROTO(prof_thread_active_init)
CTL_PROTO(prof_active)
CTL_PROTO(prof_dump)
CTL_PROTO(prof_gdump)
CTL_PROTO(prof_reset)
CTL_PROTO(prof_interval)
CTL_PROTO(lg_prof_sample)
CTL_PROTO(stats_arenas_i_small_allocated)
CTL_PROTO(stats_arenas_i_small_nmalloc)
CTL_PROTO(stats_arenas_i_small_ndalloc)
CTL_PROTO(stats_arenas_i_small_nrequests)
CTL_PROTO(stats_arenas_i_large_allocated)
CTL_PROTO(stats_arenas_i_large_nmalloc)
CTL_PROTO(stats_arenas_i_large_ndalloc)
CTL_PROTO(stats_arenas_i_large_nrequests)
CTL_PROTO(stats_arenas_i_huge_allocated)
CTL_PROTO(stats_arenas_i_huge_nmalloc)
CTL_PROTO(stats_arenas_i_huge_ndalloc)
CTL_PROTO(stats_arenas_i_huge_nrequests)
CTL_PROTO(stats_arenas_i_bins_j_nmalloc)
CTL_PROTO(stats_arenas_i_bins_j_ndalloc)
CTL_PROTO(stats_arenas_i_bins_j_nrequests)
CTL_PROTO(stats_arenas_i_bins_j_curregs)
CTL_PROTO(stats_arenas_i_bins_j_nfills)
CTL_PROTO(stats_arenas_i_bins_j_nflushes)
CTL_PROTO(stats_arenas_i_bins_j_nruns)
CTL_PROTO(stats_arenas_i_bins_j_nreruns)
CTL_PROTO(stats_arenas_i_bins_j_curruns)
INDEX_PROTO(stats_arenas_i_bins_j)
CTL_PROTO(stats_arenas_i_lruns_j_nmalloc)
CTL_PROTO(stats_arenas_i_lruns_j_ndalloc)
CTL_PROTO(stats_arenas_i_lruns_j_nrequests)
CTL_PROTO(stats_arenas_i_lruns_j_curruns)
INDEX_PROTO(stats_arenas_i_lruns_j)
CTL_PROTO(stats_arenas_i_hchunks_j_nmalloc)
CTL_PROTO(stats_arenas_i_hchunks_j_ndalloc)
CTL_PROTO(stats_arenas_i_hchunks_j_nrequests)
CTL_PROTO(stats_arenas_i_hchunks_j_curhchunks)
INDEX_PROTO(stats_arenas_i_hchunks_j)
CTL_PROTO(stats_arenas_i_nthreads)
CTL_PROTO(stats_arenas_i_dss)
CTL_PROTO(stats_arenas_i_lg_dirty_mult)
CTL_PROTO(stats_arenas_i_decay_time)
CTL_PROTO(stats_arenas_i_pactive)
CTL_PROTO(stats_arenas_i_pdirty)
CTL_PROTO(stats_arenas_i_mapped)
CTL_PROTO(stats_arenas_i_npurge)
CTL_PROTO(stats_arenas_i_nmadvise)
CTL_PROTO(stats_arenas_i_purged)
CTL_PROTO(stats_arenas_i_metadata_mapped)
CTL_PROTO(stats_arenas_i_metadata_allocated)
INDEX_PROTO(stats_arenas_i)
CTL_PROTO(stats_cactive)
CTL_PROTO(stats_allocated)
CTL_PROTO(stats_active)
CTL_PROTO(stats_metadata)
CTL_PROTO(stats_resident)
CTL_PROTO(stats_mapped)
206 
/******************************************************************************/
/* mallctl tree. */

/* Maximum tree depth. */
#define	CTL_MAX_DEPTH	6

/* Named-node initializer: sets the named flag and records the node name. */
#define	NAME(n)	{true},	n
/*
 * Internal-node initializer: child count, pointer to the child array of
 * type ctl_<t>_node_t, and a NULL ctl callback (internal nodes are not
 * terminal).
 */
#define	CHILD(t, c)							\
	sizeof(c##_node) / sizeof(ctl_##t##_node_t),			\
	(ctl_node_t *)c##_node,						\
	NULL
/* Terminal-node initializer: no children, just the c##_ctl handler. */
#define	CTL(c)	0, NULL, c##_ctl

/*
 * Only handles internal indexed nodes, since there are currently no external
 * ones.
 */
#define	INDEX(i)	{false},	i##_index
225 
/* thread.tcache.* */
static const ctl_named_node_t	thread_tcache_node[] = {
	{NAME("enabled"),	CTL(thread_tcache_enabled)},
	{NAME("flush"),		CTL(thread_tcache_flush)}
};

/* thread.prof.* */
static const ctl_named_node_t	thread_prof_node[] = {
	{NAME("name"),		CTL(thread_prof_name)},
	{NAME("active"),	CTL(thread_prof_active)}
};

/* thread.* */
static const ctl_named_node_t	thread_node[] = {
	{NAME("arena"),		CTL(thread_arena)},
	{NAME("allocated"),	CTL(thread_allocated)},
	{NAME("allocatedp"),	CTL(thread_allocatedp)},
	{NAME("deallocated"),	CTL(thread_deallocated)},
	{NAME("deallocatedp"),	CTL(thread_deallocatedp)},
	{NAME("tcache"),	CHILD(named, thread_tcache)},
	{NAME("prof"),		CHILD(named, thread_prof)}
};

/* config.* (compile-time feature flags) */
static const ctl_named_node_t	config_node[] = {
	{NAME("cache_oblivious"), CTL(config_cache_oblivious)},
	{NAME("debug"),		CTL(config_debug)},
	{NAME("fill"),		CTL(config_fill)},
	{NAME("lazy_lock"),	CTL(config_lazy_lock)},
	{NAME("malloc_conf"),	CTL(config_malloc_conf)},
	{NAME("munmap"),	CTL(config_munmap)},
	{NAME("prof"),		CTL(config_prof)},
	{NAME("prof_libgcc"),	CTL(config_prof_libgcc)},
	{NAME("prof_libunwind"), CTL(config_prof_libunwind)},
	{NAME("stats"),		CTL(config_stats)},
	{NAME("tcache"),	CTL(config_tcache)},
	{NAME("tls"),		CTL(config_tls)},
	{NAME("utrace"),	CTL(config_utrace)},
	{NAME("valgrind"),	CTL(config_valgrind)},
	{NAME("xmalloc"),	CTL(config_xmalloc)}
};

/* opt.* (run-time option values) */
static const ctl_named_node_t opt_node[] = {
	{NAME("abort"),		CTL(opt_abort)},
	{NAME("dss"),		CTL(opt_dss)},
	{NAME("lg_chunk"),	CTL(opt_lg_chunk)},
	{NAME("narenas"),	CTL(opt_narenas)},
	{NAME("purge"),		CTL(opt_purge)},
	{NAME("lg_dirty_mult"),	CTL(opt_lg_dirty_mult)},
	{NAME("decay_time"),	CTL(opt_decay_time)},
	{NAME("stats_print"),	CTL(opt_stats_print)},
	{NAME("junk"),		CTL(opt_junk)},
	{NAME("zero"),		CTL(opt_zero)},
	{NAME("quarantine"),	CTL(opt_quarantine)},
	{NAME("redzone"),	CTL(opt_redzone)},
	{NAME("utrace"),	CTL(opt_utrace)},
	{NAME("xmalloc"),	CTL(opt_xmalloc)},
	{NAME("tcache"),	CTL(opt_tcache)},
	{NAME("lg_tcache_max"),	CTL(opt_lg_tcache_max)},
	{NAME("prof"),		CTL(opt_prof)},
	{NAME("prof_prefix"),	CTL(opt_prof_prefix)},
	{NAME("prof_active"),	CTL(opt_prof_active)},
	{NAME("prof_thread_active_init"), CTL(opt_prof_thread_active_init)},
	{NAME("lg_prof_sample"), CTL(opt_lg_prof_sample)},
	{NAME("lg_prof_interval"), CTL(opt_lg_prof_interval)},
	{NAME("prof_gdump"),	CTL(opt_prof_gdump)},
	{NAME("prof_final"),	CTL(opt_prof_final)},
	{NAME("prof_leak"),	CTL(opt_prof_leak)},
	{NAME("prof_accum"),	CTL(opt_prof_accum)}
};

/* tcache.* */
static const ctl_named_node_t	tcache_node[] = {
	{NAME("create"),	CTL(tcache_create)},
	{NAME("flush"),		CTL(tcache_flush)},
	{NAME("destroy"),	CTL(tcache_destroy)}
};

/* arena.<i>.* (per-arena operations; <i> resolved by arena_i_index()) */
static const ctl_named_node_t arena_i_node[] = {
	{NAME("purge"),		CTL(arena_i_purge)},
	{NAME("decay"),		CTL(arena_i_decay)},
	{NAME("dss"),		CTL(arena_i_dss)},
	{NAME("lg_dirty_mult"),	CTL(arena_i_lg_dirty_mult)},
	{NAME("decay_time"),	CTL(arena_i_decay_time)},
	{NAME("chunk_hooks"),	CTL(arena_i_chunk_hooks)}
};
static const ctl_named_node_t super_arena_i_node[] = {
	{NAME(""),		CHILD(named, arena_i)}
};

static const ctl_indexed_node_t arena_node[] = {
	{INDEX(arena_i)}
};
314 
/* arenas.bin.<i>.* (size-class metadata for small bins) */
static const ctl_named_node_t arenas_bin_i_node[] = {
	{NAME("size"),		CTL(arenas_bin_i_size)},
	{NAME("nregs"),		CTL(arenas_bin_i_nregs)},
	{NAME("run_size"),	CTL(arenas_bin_i_run_size)}
};
static const ctl_named_node_t super_arenas_bin_i_node[] = {
	{NAME(""),		CHILD(named, arenas_bin_i)}
};

static const ctl_indexed_node_t arenas_bin_node[] = {
	{INDEX(arenas_bin_i)}
};

/* arenas.lrun.<i>.* (large run size classes) */
static const ctl_named_node_t arenas_lrun_i_node[] = {
	{NAME("size"),		CTL(arenas_lrun_i_size)}
};
static const ctl_named_node_t super_arenas_lrun_i_node[] = {
	{NAME(""),		CHILD(named, arenas_lrun_i)}
};

static const ctl_indexed_node_t arenas_lrun_node[] = {
	{INDEX(arenas_lrun_i)}
};

/* arenas.hchunk.<i>.* (huge chunk size classes) */
static const ctl_named_node_t arenas_hchunk_i_node[] = {
	{NAME("size"),		CTL(arenas_hchunk_i_size)}
};
static const ctl_named_node_t super_arenas_hchunk_i_node[] = {
	{NAME(""),		CHILD(named, arenas_hchunk_i)}
};

static const ctl_indexed_node_t arenas_hchunk_node[] = {
	{INDEX(arenas_hchunk_i)}
};

/* arenas.* */
static const ctl_named_node_t arenas_node[] = {
	{NAME("narenas"),	CTL(arenas_narenas)},
	{NAME("initialized"),	CTL(arenas_initialized)},
	{NAME("lg_dirty_mult"),	CTL(arenas_lg_dirty_mult)},
	{NAME("decay_time"),	CTL(arenas_decay_time)},
	{NAME("quantum"),	CTL(arenas_quantum)},
	{NAME("page"),		CTL(arenas_page)},
	{NAME("tcache_max"),	CTL(arenas_tcache_max)},
	{NAME("nbins"),		CTL(arenas_nbins)},
	{NAME("nhbins"),	CTL(arenas_nhbins)},
	{NAME("bin"),		CHILD(indexed, arenas_bin)},
	{NAME("nlruns"),	CTL(arenas_nlruns)},
	{NAME("lrun"),		CHILD(indexed, arenas_lrun)},
	{NAME("nhchunks"),	CTL(arenas_nhchunks)},
	{NAME("hchunk"),	CHILD(indexed, arenas_hchunk)},
	{NAME("extend"),	CTL(arenas_extend)}
};

/* prof.* */
static const ctl_named_node_t	prof_node[] = {
	{NAME("thread_active_init"), CTL(prof_thread_active_init)},
	{NAME("active"),	CTL(prof_active)},
	{NAME("dump"),		CTL(prof_dump)},
	{NAME("gdump"),		CTL(prof_gdump)},
	{NAME("reset"),		CTL(prof_reset)},
	{NAME("interval"),	CTL(prof_interval)},
	{NAME("lg_sample"),	CTL(lg_prof_sample)}
};
377 
/* stats.arenas.<i>.metadata.* */
static const ctl_named_node_t stats_arenas_i_metadata_node[] = {
	{NAME("mapped"),	CTL(stats_arenas_i_metadata_mapped)},
	{NAME("allocated"),	CTL(stats_arenas_i_metadata_allocated)}
};

/* stats.arenas.<i>.small.* */
static const ctl_named_node_t stats_arenas_i_small_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_small_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_small_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_small_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_small_nrequests)}
};

/* stats.arenas.<i>.large.* */
static const ctl_named_node_t stats_arenas_i_large_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_large_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_large_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_large_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_large_nrequests)}
};

/* stats.arenas.<i>.huge.* */
static const ctl_named_node_t stats_arenas_i_huge_node[] = {
	{NAME("allocated"),	CTL(stats_arenas_i_huge_allocated)},
	{NAME("nmalloc"),	CTL(stats_arenas_i_huge_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_huge_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_huge_nrequests)}
};

/* stats.arenas.<i>.bins.<j>.* */
static const ctl_named_node_t stats_arenas_i_bins_j_node[] = {
	{NAME("nmalloc"),	CTL(stats_arenas_i_bins_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_bins_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_bins_j_nrequests)},
	{NAME("curregs"),	CTL(stats_arenas_i_bins_j_curregs)},
	{NAME("nfills"),	CTL(stats_arenas_i_bins_j_nfills)},
	{NAME("nflushes"),	CTL(stats_arenas_i_bins_j_nflushes)},
	{NAME("nruns"),		CTL(stats_arenas_i_bins_j_nruns)},
	{NAME("nreruns"),	CTL(stats_arenas_i_bins_j_nreruns)},
	{NAME("curruns"),	CTL(stats_arenas_i_bins_j_curruns)}
};
static const ctl_named_node_t super_stats_arenas_i_bins_j_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i_bins_j)}
};

static const ctl_indexed_node_t stats_arenas_i_bins_node[] = {
	{INDEX(stats_arenas_i_bins_j)}
};

/* stats.arenas.<i>.lruns.<j>.* */
static const ctl_named_node_t stats_arenas_i_lruns_j_node[] = {
	{NAME("nmalloc"),	CTL(stats_arenas_i_lruns_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_lruns_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_lruns_j_nrequests)},
	{NAME("curruns"),	CTL(stats_arenas_i_lruns_j_curruns)}
};
static const ctl_named_node_t super_stats_arenas_i_lruns_j_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i_lruns_j)}
};

static const ctl_indexed_node_t stats_arenas_i_lruns_node[] = {
	{INDEX(stats_arenas_i_lruns_j)}
};

/* stats.arenas.<i>.hchunks.<j>.* */
static const ctl_named_node_t stats_arenas_i_hchunks_j_node[] = {
	{NAME("nmalloc"),	CTL(stats_arenas_i_hchunks_j_nmalloc)},
	{NAME("ndalloc"),	CTL(stats_arenas_i_hchunks_j_ndalloc)},
	{NAME("nrequests"),	CTL(stats_arenas_i_hchunks_j_nrequests)},
	{NAME("curhchunks"),	CTL(stats_arenas_i_hchunks_j_curhchunks)}
};
static const ctl_named_node_t super_stats_arenas_i_hchunks_j_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i_hchunks_j)}
};

static const ctl_indexed_node_t stats_arenas_i_hchunks_node[] = {
	{INDEX(stats_arenas_i_hchunks_j)}
};

/* stats.arenas.<i>.* */
static const ctl_named_node_t stats_arenas_i_node[] = {
	{NAME("nthreads"),	CTL(stats_arenas_i_nthreads)},
	{NAME("dss"),		CTL(stats_arenas_i_dss)},
	{NAME("lg_dirty_mult"),	CTL(stats_arenas_i_lg_dirty_mult)},
	{NAME("decay_time"),	CTL(stats_arenas_i_decay_time)},
	{NAME("pactive"),	CTL(stats_arenas_i_pactive)},
	{NAME("pdirty"),	CTL(stats_arenas_i_pdirty)},
	{NAME("mapped"),	CTL(stats_arenas_i_mapped)},
	{NAME("npurge"),	CTL(stats_arenas_i_npurge)},
	{NAME("nmadvise"),	CTL(stats_arenas_i_nmadvise)},
	{NAME("purged"),	CTL(stats_arenas_i_purged)},
	{NAME("metadata"),	CHILD(named, stats_arenas_i_metadata)},
	{NAME("small"),		CHILD(named, stats_arenas_i_small)},
	{NAME("large"),		CHILD(named, stats_arenas_i_large)},
	{NAME("huge"),		CHILD(named, stats_arenas_i_huge)},
	{NAME("bins"),		CHILD(indexed, stats_arenas_i_bins)},
	{NAME("lruns"),		CHILD(indexed, stats_arenas_i_lruns)},
	{NAME("hchunks"),	CHILD(indexed, stats_arenas_i_hchunks)}
};
static const ctl_named_node_t super_stats_arenas_i_node[] = {
	{NAME(""),		CHILD(named, stats_arenas_i)}
};

static const ctl_indexed_node_t stats_arenas_node[] = {
	{INDEX(stats_arenas_i)}
};

/* stats.* */
static const ctl_named_node_t stats_node[] = {
	{NAME("cactive"),	CTL(stats_cactive)},
	{NAME("allocated"),	CTL(stats_allocated)},
	{NAME("active"),	CTL(stats_active)},
	{NAME("metadata"),	CTL(stats_metadata)},
	{NAME("resident"),	CTL(stats_resident)},
	{NAME("mapped"),	CTL(stats_mapped)},
	{NAME("arenas"),	CHILD(indexed, stats_arenas)}
};

/* Top level of the mallctl namespace. */
static const ctl_named_node_t	root_node[] = {
	{NAME("version"),	CTL(version)},
	{NAME("epoch"),		CTL(epoch)},
	{NAME("thread"),	CHILD(named, thread)},
	{NAME("config"),	CHILD(named, config)},
	{NAME("opt"),		CHILD(named, opt)},
	{NAME("tcache"),	CHILD(named, tcache)},
	{NAME("arena"),		CHILD(indexed, arena)},
	{NAME("arenas"),	CHILD(named, arenas)},
	{NAME("prof"),		CHILD(named, prof)},
	{NAME("stats"),		CHILD(named, stats)}
};
/* Pseudo-root wrapper so lookups can start from a uniform named node. */
static const ctl_named_node_t super_root_node[] = {
	{NAME(""),		CHILD(named, root)}
};

#undef NAME
#undef CHILD
#undef CTL
#undef INDEX
508 
509 /******************************************************************************/
510 
511 static bool
ctl_arena_init(ctl_arena_stats_t * astats)512 ctl_arena_init(ctl_arena_stats_t *astats)
513 {
514 
515 	if (astats->lstats == NULL) {
516 		astats->lstats = (malloc_large_stats_t *)a0malloc(nlclasses *
517 		    sizeof(malloc_large_stats_t));
518 		if (astats->lstats == NULL)
519 			return (true);
520 	}
521 
522 	if (astats->hstats == NULL) {
523 		astats->hstats = (malloc_huge_stats_t *)a0malloc(nhclasses *
524 		    sizeof(malloc_huge_stats_t));
525 		if (astats->hstats == NULL)
526 			return (true);
527 	}
528 
529 	return (false);
530 }
531 
532 static void
ctl_arena_clear(ctl_arena_stats_t * astats)533 ctl_arena_clear(ctl_arena_stats_t *astats)
534 {
535 
536 	astats->nthreads = 0;
537 	astats->dss = dss_prec_names[dss_prec_limit];
538 	astats->lg_dirty_mult = -1;
539 	astats->decay_time = -1;
540 	astats->pactive = 0;
541 	astats->pdirty = 0;
542 	if (config_stats) {
543 		memset(&astats->astats, 0, sizeof(arena_stats_t));
544 		astats->allocated_small = 0;
545 		astats->nmalloc_small = 0;
546 		astats->ndalloc_small = 0;
547 		astats->nrequests_small = 0;
548 		memset(astats->bstats, 0, NBINS * sizeof(malloc_bin_stats_t));
549 		memset(astats->lstats, 0, nlclasses *
550 		    sizeof(malloc_large_stats_t));
551 		memset(astats->hstats, 0, nhclasses *
552 		    sizeof(malloc_huge_stats_t));
553 	}
554 }
555 
556 static void
ctl_arena_stats_amerge(ctl_arena_stats_t * cstats,arena_t * arena)557 ctl_arena_stats_amerge(ctl_arena_stats_t *cstats, arena_t *arena)
558 {
559 	unsigned i;
560 
561 	if (config_stats) {
562 		arena_stats_merge(arena, &cstats->nthreads, &cstats->dss,
563 		    &cstats->lg_dirty_mult, &cstats->decay_time,
564 		    &cstats->pactive, &cstats->pdirty, &cstats->astats,
565 		    cstats->bstats, cstats->lstats, cstats->hstats);
566 
567 		for (i = 0; i < NBINS; i++) {
568 			cstats->allocated_small += cstats->bstats[i].curregs *
569 			    index2size(i);
570 			cstats->nmalloc_small += cstats->bstats[i].nmalloc;
571 			cstats->ndalloc_small += cstats->bstats[i].ndalloc;
572 			cstats->nrequests_small += cstats->bstats[i].nrequests;
573 		}
574 	} else {
575 		arena_basic_stats_merge(arena, &cstats->nthreads, &cstats->dss,
576 		    &cstats->lg_dirty_mult, &cstats->decay_time,
577 		    &cstats->pactive, &cstats->pdirty);
578 	}
579 }
580 
/*
 * Accumulate astats into sstats.  Used to fold each arena's snapshot into
 * the summary element at ctl_stats.arenas[ctl_stats.narenas].
 */
static void
ctl_arena_stats_smerge(ctl_arena_stats_t *sstats, ctl_arena_stats_t *astats)
{
	unsigned i;

	/* Basic counters are merged unconditionally. */
	sstats->nthreads += astats->nthreads;
	sstats->pactive += astats->pactive;
	sstats->pdirty += astats->pdirty;

	if (config_stats) {
		sstats->astats.mapped += astats->astats.mapped;
		sstats->astats.npurge += astats->astats.npurge;
		sstats->astats.nmadvise += astats->astats.nmadvise;
		sstats->astats.purged += astats->astats.purged;

		sstats->astats.metadata_mapped +=
		    astats->astats.metadata_mapped;
		sstats->astats.metadata_allocated +=
		    astats->astats.metadata_allocated;

		sstats->allocated_small += astats->allocated_small;
		sstats->nmalloc_small += astats->nmalloc_small;
		sstats->ndalloc_small += astats->ndalloc_small;
		sstats->nrequests_small += astats->nrequests_small;

		sstats->astats.allocated_large +=
		    astats->astats.allocated_large;
		sstats->astats.nmalloc_large += astats->astats.nmalloc_large;
		sstats->astats.ndalloc_large += astats->astats.ndalloc_large;
		sstats->astats.nrequests_large +=
		    astats->astats.nrequests_large;

		sstats->astats.allocated_huge += astats->astats.allocated_huge;
		sstats->astats.nmalloc_huge += astats->astats.nmalloc_huge;
		sstats->astats.ndalloc_huge += astats->astats.ndalloc_huge;

		/* Per-bin (small size class) counters. */
		for (i = 0; i < NBINS; i++) {
			sstats->bstats[i].nmalloc += astats->bstats[i].nmalloc;
			sstats->bstats[i].ndalloc += astats->bstats[i].ndalloc;
			sstats->bstats[i].nrequests +=
			    astats->bstats[i].nrequests;
			sstats->bstats[i].curregs += astats->bstats[i].curregs;
			if (config_tcache) {
				sstats->bstats[i].nfills +=
				    astats->bstats[i].nfills;
				sstats->bstats[i].nflushes +=
				    astats->bstats[i].nflushes;
			}
			sstats->bstats[i].nruns += astats->bstats[i].nruns;
			sstats->bstats[i].reruns += astats->bstats[i].reruns;
			sstats->bstats[i].curruns += astats->bstats[i].curruns;
		}

		/* Per large-run size class counters. */
		for (i = 0; i < nlclasses; i++) {
			sstats->lstats[i].nmalloc += astats->lstats[i].nmalloc;
			sstats->lstats[i].ndalloc += astats->lstats[i].ndalloc;
			sstats->lstats[i].nrequests +=
			    astats->lstats[i].nrequests;
			sstats->lstats[i].curruns += astats->lstats[i].curruns;
		}

		/* Per huge-chunk size class counters. */
		for (i = 0; i < nhclasses; i++) {
			sstats->hstats[i].nmalloc += astats->hstats[i].nmalloc;
			sstats->hstats[i].ndalloc += astats->hstats[i].ndalloc;
			sstats->hstats[i].curhchunks +=
			    astats->hstats[i].curhchunks;
		}
	}
}
650 
651 static void
ctl_arena_refresh(arena_t * arena,unsigned i)652 ctl_arena_refresh(arena_t *arena, unsigned i)
653 {
654 	ctl_arena_stats_t *astats = &ctl_stats.arenas[i];
655 	ctl_arena_stats_t *sstats = &ctl_stats.arenas[ctl_stats.narenas];
656 
657 	ctl_arena_clear(astats);
658 	ctl_arena_stats_amerge(astats, arena);
659 	/* Merge into sum stats as well. */
660 	ctl_arena_stats_smerge(sstats, astats);
661 }
662 
/*
 * Extend ctl_stats to cover one more arena.  Creates the new arena, grows
 * the stats array by one element, and keeps the summary element in the
 * last slot.  Returns true on failure (the existing array is untouched).
 */
static bool
ctl_grow(void)
{
	ctl_arena_stats_t *astats;

	/* Initialize new arena. */
	if (arena_init(ctl_stats.narenas) == NULL)
		return (true);

	/*
	 * Allocate extended arena stats: narenas+1 old elements (including
	 * the summary slot) plus one new per-arena element.
	 */
	astats = (ctl_arena_stats_t *)a0malloc((ctl_stats.narenas + 2) *
	    sizeof(ctl_arena_stats_t));
	if (astats == NULL)
		return (true);

	/* Initialize the new astats element. */
	memcpy(astats, ctl_stats.arenas, (ctl_stats.narenas + 1) *
	    sizeof(ctl_arena_stats_t));
	memset(&astats[ctl_stats.narenas + 1], 0, sizeof(ctl_arena_stats_t));
	if (ctl_arena_init(&astats[ctl_stats.narenas + 1])) {
		a0dalloc(astats);
		return (true);
	}
	/* Swap merged stats to their new location (always the last slot). */
	{
		ctl_arena_stats_t tstats;
		memcpy(&tstats, &astats[ctl_stats.narenas],
		    sizeof(ctl_arena_stats_t));
		memcpy(&astats[ctl_stats.narenas],
		    &astats[ctl_stats.narenas + 1], sizeof(ctl_arena_stats_t));
		memcpy(&astats[ctl_stats.narenas + 1], &tstats,
		    sizeof(ctl_arena_stats_t));
	}
	a0dalloc(ctl_stats.arenas);
	ctl_stats.arenas = astats;
	ctl_stats.narenas++;

	return (false);
}
702 
/*
 * Re-snapshot all arena stats and recompute the global totals, then bump
 * ctl_epoch.  Caller must hold ctl_mtx (ctl_stats is protected by it).
 */
static void
ctl_refresh(void)
{
	unsigned i;
	VARIABLE_ARRAY(arena_t *, tarenas, ctl_stats.narenas);

	/*
	 * Clear sum stats, since they will be merged into by
	 * ctl_arena_refresh().
	 */
	ctl_arena_clear(&ctl_stats.arenas[ctl_stats.narenas]);

	for (i = 0; i < ctl_stats.narenas; i++)
		tarenas[i] = arena_get(i, false);

	for (i = 0; i < ctl_stats.narenas; i++) {
		bool initialized = (tarenas[i] != NULL);

		ctl_stats.arenas[i].initialized = initialized;
		if (initialized)
			ctl_arena_refresh(tarenas[i], i);
	}

	if (config_stats) {
		/* Derive the global totals from the summary element. */
		size_t base_allocated, base_resident, base_mapped;
		base_stats_get(&base_allocated, &base_resident, &base_mapped);
		ctl_stats.allocated =
		    ctl_stats.arenas[ctl_stats.narenas].allocated_small +
		    ctl_stats.arenas[ctl_stats.narenas].astats.allocated_large +
		    ctl_stats.arenas[ctl_stats.narenas].astats.allocated_huge;
		ctl_stats.active =
		    (ctl_stats.arenas[ctl_stats.narenas].pactive << LG_PAGE);
		ctl_stats.metadata = base_allocated +
		    ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped +
		    ctl_stats.arenas[ctl_stats.narenas].astats
		    .metadata_allocated;
		ctl_stats.resident = base_resident +
		    ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped +
		    ((ctl_stats.arenas[ctl_stats.narenas].pactive +
		    ctl_stats.arenas[ctl_stats.narenas].pdirty) << LG_PAGE);
		ctl_stats.mapped = base_mapped +
		    ctl_stats.arenas[ctl_stats.narenas].astats.mapped;
	}

	ctl_epoch++;
}
749 
/*
 * One-time initialization of the ctl stats state, serialized on ctl_mtx.
 * Returns true on failure, in which case no partial state is left behind.
 */
static bool
ctl_init(void)
{
	bool ret;

	malloc_mutex_lock(&ctl_mtx);
	if (!ctl_initialized) {
		/*
		 * Allocate space for one extra arena stats element, which
		 * contains summed stats across all arenas.
		 */
		ctl_stats.narenas = narenas_total_get();
		ctl_stats.arenas = (ctl_arena_stats_t *)a0malloc(
		    (ctl_stats.narenas + 1) * sizeof(ctl_arena_stats_t));
		if (ctl_stats.arenas == NULL) {
			ret = true;
			goto label_return;
		}
		memset(ctl_stats.arenas, 0, (ctl_stats.narenas + 1) *
		    sizeof(ctl_arena_stats_t));

		/*
		 * Initialize all stats structures, regardless of whether they
		 * ever get used.  Lazy initialization would allow errors to
		 * cause inconsistent state to be viewable by the application.
		 */
		if (config_stats) {
			unsigned i;
			for (i = 0; i <= ctl_stats.narenas; i++) {
				if (ctl_arena_init(&ctl_stats.arenas[i])) {
					/* Unwind the elements already done. */
					unsigned j;
					for (j = 0; j < i; j++) {
						a0dalloc(
						    ctl_stats.arenas[j].lstats);
						a0dalloc(
						    ctl_stats.arenas[j].hstats);
					}
					a0dalloc(ctl_stats.arenas);
					ctl_stats.arenas = NULL;
					ret = true;
					goto label_return;
				}
			}
		}
		ctl_stats.arenas[ctl_stats.narenas].initialized = true;

		ctl_epoch = 0;
		ctl_refresh();
		ctl_initialized = true;
	}

	ret = false;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
806 
/*
 * Translate a dotted name (e.g. "stats.arenas.0.pdirty") into a mib.
 * On entry *depthp is the capacity of mibp/nodesp; on success it is set to
 * the number of components consumed.  nodesp, if non-NULL, receives the
 * node visited at each depth.  Returns 0 on success, ENOENT if the name
 * does not resolve to a terminal node.
 */
static int
ctl_lookup(const char *name, ctl_node_t const **nodesp, size_t *mibp,
    size_t *depthp)
{
	int ret;
	const char *elm, *tdot, *dot;
	size_t elen, i, j;
	const ctl_named_node_t *node;

	elm = name;
	/* Equivalent to strchrnul(). */
	dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot : strchr(elm, '\0');
	elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	if (elen == 0) {
		ret = ENOENT;
		goto label_return;
	}
	node = super_root_node;
	for (i = 0; i < *depthp; i++) {
		assert(node);
		assert(node->nchildren > 0);
		if (ctl_named_node(node->children) != NULL) {
			const ctl_named_node_t *pnode = node;

			/* Children are named: linear search by component. */
			for (j = 0; j < node->nchildren; j++) {
				const ctl_named_node_t *child =
				    ctl_named_children(node, j);
				if (strlen(child->name) == elen &&
				    strncmp(elm, child->name, elen) == 0) {
					node = child;
					if (nodesp != NULL)
						nodesp[i] =
						    (const ctl_node_t *)node;
					mibp[i] = j;
					break;
				}
			}
			/* node unchanged means no child matched. */
			if (node == pnode) {
				ret = ENOENT;
				goto label_return;
			}
		} else {
			uintmax_t index;
			const ctl_indexed_node_t *inode;

			/* Children are indexed: parse a numeric component. */
			index = malloc_strtoumax(elm, NULL, 10);
			if (index == UINTMAX_MAX || index > SIZE_T_MAX) {
				ret = ENOENT;
				goto label_return;
			}

			inode = ctl_indexed_node(node->children);
			node = inode->index(mibp, *depthp, (size_t)index);
			if (node == NULL) {
				ret = ENOENT;
				goto label_return;
			}

			if (nodesp != NULL)
				nodesp[i] = (const ctl_node_t *)node;
			mibp[i] = (size_t)index;
		}

		if (node->ctl != NULL) {
			/* Terminal node. */
			if (*dot != '\0') {
				/*
				 * The name contains more elements than are
				 * in this path through the tree.
				 */
				ret = ENOENT;
				goto label_return;
			}
			/* Complete lookup successful. */
			*depthp = i + 1;
			break;
		}

		/* Update elm. */
		if (*dot == '\0') {
			/* No more elements. */
			ret = ENOENT;
			goto label_return;
		}
		elm = &dot[1];
		dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot :
		    strchr(elm, '\0');
		elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	}

	ret = 0;
label_return:
	return (ret);
}
903 
904 int
ctl_byname(const char * name,void * oldp,size_t * oldlenp,void * newp,size_t newlen)905 ctl_byname(const char *name, void *oldp, size_t *oldlenp, void *newp,
906     size_t newlen)
907 {
908 	int ret;
909 	size_t depth;
910 	ctl_node_t const *nodes[CTL_MAX_DEPTH];
911 	size_t mib[CTL_MAX_DEPTH];
912 	const ctl_named_node_t *node;
913 
914 	if (!ctl_initialized && ctl_init()) {
915 		ret = EAGAIN;
916 		goto label_return;
917 	}
918 
919 	depth = CTL_MAX_DEPTH;
920 	ret = ctl_lookup(name, nodes, mib, &depth);
921 	if (ret != 0)
922 		goto label_return;
923 
924 	node = ctl_named_node(nodes[depth-1]);
925 	if (node != NULL && node->ctl)
926 		ret = node->ctl(mib, depth, oldp, oldlenp, newp, newlen);
927 	else {
928 		/* The name refers to a partial path through the ctl tree. */
929 		ret = ENOENT;
930 	}
931 
932 label_return:
933 	return(ret);
934 }
935 
936 int
ctl_nametomib(const char * name,size_t * mibp,size_t * miblenp)937 ctl_nametomib(const char *name, size_t *mibp, size_t *miblenp)
938 {
939 	int ret;
940 
941 	if (!ctl_initialized && ctl_init()) {
942 		ret = EAGAIN;
943 		goto label_return;
944 	}
945 
946 	ret = ctl_lookup(name, NULL, mibp, miblenp);
947 label_return:
948 	return(ret);
949 }
950 
951 int
ctl_bymib(const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen)952 ctl_bymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
953     void *newp, size_t newlen)
954 {
955 	int ret;
956 	const ctl_named_node_t *node;
957 	size_t i;
958 
959 	if (!ctl_initialized && ctl_init()) {
960 		ret = EAGAIN;
961 		goto label_return;
962 	}
963 
964 	/* Iterate down the tree. */
965 	node = super_root_node;
966 	for (i = 0; i < miblen; i++) {
967 		assert(node);
968 		assert(node->nchildren > 0);
969 		if (ctl_named_node(node->children) != NULL) {
970 			/* Children are named. */
971 			if (node->nchildren <= (unsigned)mib[i]) {
972 				ret = ENOENT;
973 				goto label_return;
974 			}
975 			node = ctl_named_children(node, mib[i]);
976 		} else {
977 			const ctl_indexed_node_t *inode;
978 
979 			/* Indexed element. */
980 			inode = ctl_indexed_node(node->children);
981 			node = inode->index(mib, miblen, mib[i]);
982 			if (node == NULL) {
983 				ret = ENOENT;
984 				goto label_return;
985 			}
986 		}
987 	}
988 
989 	/* Call the ctl function. */
990 	if (node && node->ctl)
991 		ret = node->ctl(mib, miblen, oldp, oldlenp, newp, newlen);
992 	else {
993 		/* Partial MIB. */
994 		ret = ENOENT;
995 	}
996 
997 label_return:
998 	return(ret);
999 }
1000 
/*
 * One-time bootstrap: initialize ctl_mtx and mark the ctl tables as
 * uninitialized so the first ctl_* entry point triggers ctl_init().
 * Returns true on error (mutex initialization failure).
 */
bool
ctl_boot(void)
{

	if (malloc_mutex_init(&ctl_mtx))
		return (true);

	ctl_initialized = false;

	return (false);
}
1012 
/* Acquire ctl_mtx in preparation for fork(); pairs with the postfork hooks. */
void
ctl_prefork(void)
{

	malloc_mutex_prefork(&ctl_mtx);
}
1019 
/* Release ctl_mtx in the parent after fork(). */
void
ctl_postfork_parent(void)
{

	malloc_mutex_postfork_parent(&ctl_mtx);
}
1026 
/* Reinitialize/release ctl_mtx in the child after fork(). */
void
ctl_postfork_child(void)
{

	malloc_mutex_postfork_child(&ctl_mtx);
}
1033 
1034 /******************************************************************************/
1035 /* *_ctl() functions. */
1036 
/*
 * The *_ctl() helper macros below assume the enclosing scope declares "ret"
 * and a "label_return" label, and that oldp/oldlenp/newp/newlen name the
 * standard ctl handler parameters.
 */

/* Fail with EPERM if the caller attempted a write (newp/newlen set). */
#define	READONLY()	do {						\
	if (newp != NULL || newlen != 0) {				\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)
1043 
/* Fail with EPERM if the caller attempted a read (oldp/oldlenp set). */
#define	WRITEONLY()	do {						\
	if (oldp != NULL || oldlenp != NULL) {				\
		ret = EPERM;						\
		goto label_return;					\
	}								\
} while (0)
1050 
/* Fail with EPERM if the caller attempted a read and a write in one call. */
#define	READ_XOR_WRITE()	do {					\
	if ((oldp != NULL && oldlenp != NULL) && (newp != NULL ||	\
	    newlen != 0)) {						\
		ret = EPERM;						\
	goto label_return;					\
	}								\
} while (0)
1058 
/*
 * Copy value v (of type t) out to the caller.  On a size mismatch, copy as
 * many bytes as fit and fail with EINVAL; otherwise store v via oldp.
 */
#define	READ(v, t)	do {						\
	if (oldp != NULL && oldlenp != NULL) {				\
		if (*oldlenp != sizeof(t)) {				\
			size_t	copylen = (sizeof(t) <= *oldlenp)	\
			    ? sizeof(t) : *oldlenp;			\
			memcpy(oldp, (void *)&(v), copylen);		\
			ret = EINVAL;					\
			goto label_return;				\
		}							\
		*(t *)oldp = (v);					\
	}								\
} while (0)
1071 
/*
 * Copy the caller-supplied new value (of type t) into v.  Fails with EINVAL
 * if newlen does not exactly match sizeof(t).  No-op when newp is NULL.
 */
#define	WRITE(v, t)	do {						\
	if (newp != NULL) {						\
		if (newlen != sizeof(t)) {				\
			ret = EINVAL;					\
			goto label_return;				\
		}							\
		(v) = *(t *)newp;					\
	}								\
} while (0)
1081 
1082 /*
1083  * There's a lot of code duplication in the following macros due to limitations
1084  * in how nested cpp macros are expanded.
1085  */
/*
 * Generate a read-only handler n##_ctl gated on compile-time condition "c";
 * acquire ctl_mtx iff "l" is true.  v is the value expression, t its type.
 */
#define	CTL_RO_CLGEN(c, l, n, v, t)					\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	if (l)								\
		malloc_mutex_lock(&ctl_mtx);				\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	if (l)								\
		malloc_mutex_unlock(&ctl_mtx);				\
	return (ret);							\
}
1108 
/*
 * Generate a read-only handler gated on "c", always holding ctl_mtx while
 * evaluating value expression v (of type t).
 */
#define	CTL_RO_CGEN(c, n, v, t)						\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	malloc_mutex_lock(&ctl_mtx);					\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	malloc_mutex_unlock(&ctl_mtx);					\
	return (ret);							\
}
1129 
/*
 * Generate an unconditional read-only handler that evaluates v (of type t)
 * while holding ctl_mtx.
 */
#define	CTL_RO_GEN(n, v, t)						\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	malloc_mutex_lock(&ctl_mtx);					\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	malloc_mutex_unlock(&ctl_mtx);					\
	return (ret);							\
}
1148 
1149 /*
1150  * ctl_mtx is not acquired, under the assumption that no pertinent data will
1151  * mutate during the call.
1152  */
/*
 * Lock-free variant of CTL_RO_CGEN: read-only handler gated on "c", no
 * ctl_mtx acquisition (see the comment above about non-mutating data).
 */
#define	CTL_RO_NL_CGEN(c, n, v, t)					\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
1171 
/* Unconditional, lock-free read-only handler for value expression v. */
#define	CTL_RO_NL_GEN(n, v, t)						\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	READONLY();							\
	oldval = (v);							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
1188 
/*
 * Lock-free read-only handler gated on "c" whose value comes from a
 * thread-specific-data accessor: m is a function taking a tsd_t *.
 */
#define	CTL_TSD_RO_NL_CGEN(c, n, m, t)					\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
	tsd_t *tsd;							\
									\
	if (!(c))							\
		return (ENOENT);					\
	READONLY();							\
	tsd = tsd_fetch();						\
	oldval = (m(tsd));						\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
1209 
/*
 * Read-only handler exposing a compile-time configuration constant n
 * directly (note: n is both the handler name stem and the value).
 */
#define	CTL_RO_CONFIG_GEN(n, t)						\
static int								\
n##_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,	\
    void *newp, size_t newlen)						\
{									\
	int ret;							\
	t oldval;							\
									\
	READONLY();							\
	oldval = n;							\
	READ(oldval, t);						\
									\
	ret = 0;							\
label_return:								\
	return (ret);							\
}
1226 
1227 /******************************************************************************/
1228 
/* "version": the jemalloc version string (read-only, lock-free). */
CTL_RO_NL_GEN(version, JEMALLOC_VERSION, const char *)
1230 
/*
 * "epoch" mallctl: writing any uint64_t triggers a stats refresh
 * (ctl_refresh(), which also advances ctl_epoch); reading returns the
 * current epoch counter.  All under ctl_mtx.
 */
static int
epoch_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	UNUSED uint64_t newval;

	malloc_mutex_lock(&ctl_mtx);
	/* The written value is ignored; only the act of writing matters. */
	WRITE(newval, uint64_t);
	if (newp != NULL)
		ctl_refresh();
	READ(ctl_epoch, uint64_t);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
1249 
1250 /******************************************************************************/
1251 
/* Read-only handlers exposing the compile-time configuration flags. */
CTL_RO_CONFIG_GEN(config_cache_oblivious, bool)
CTL_RO_CONFIG_GEN(config_debug, bool)
CTL_RO_CONFIG_GEN(config_fill, bool)
CTL_RO_CONFIG_GEN(config_lazy_lock, bool)
CTL_RO_CONFIG_GEN(config_malloc_conf, const char *)
CTL_RO_CONFIG_GEN(config_munmap, bool)
CTL_RO_CONFIG_GEN(config_prof, bool)
CTL_RO_CONFIG_GEN(config_prof_libgcc, bool)
CTL_RO_CONFIG_GEN(config_prof_libunwind, bool)
CTL_RO_CONFIG_GEN(config_stats, bool)
CTL_RO_CONFIG_GEN(config_tcache, bool)
CTL_RO_CONFIG_GEN(config_tls, bool)
CTL_RO_CONFIG_GEN(config_utrace, bool)
CTL_RO_CONFIG_GEN(config_valgrind, bool)
CTL_RO_CONFIG_GEN(config_xmalloc, bool)
1267 
1268 /******************************************************************************/
1269 
/*
 * Read-only, lock-free handlers exposing run-time option values (opt_*);
 * options are immutable after startup, so no ctl_mtx is needed.
 */
CTL_RO_NL_GEN(opt_abort, opt_abort, bool)
CTL_RO_NL_GEN(opt_dss, opt_dss, const char *)
CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
CTL_RO_NL_GEN(opt_narenas, opt_narenas, unsigned)
CTL_RO_NL_GEN(opt_purge, purge_mode_names[opt_purge], const char *)
CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
CTL_RO_NL_GEN(opt_decay_time, opt_decay_time, ssize_t)
CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool)
CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, const char *)
CTL_RO_NL_CGEN(config_fill, opt_quarantine, opt_quarantine, size_t)
CTL_RO_NL_CGEN(config_fill, opt_redzone, opt_redzone, bool)
CTL_RO_NL_CGEN(config_fill, opt_zero, opt_zero, bool)
CTL_RO_NL_CGEN(config_utrace, opt_utrace, opt_utrace, bool)
CTL_RO_NL_CGEN(config_xmalloc, opt_xmalloc, opt_xmalloc, bool)
CTL_RO_NL_CGEN(config_tcache, opt_tcache, opt_tcache, bool)
CTL_RO_NL_CGEN(config_tcache, opt_lg_tcache_max, opt_lg_tcache_max, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof, opt_prof, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_prefix, opt_prof_prefix, const char *)
CTL_RO_NL_CGEN(config_prof, opt_prof_active, opt_prof_active, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_thread_active_init,
    opt_prof_thread_active_init, bool)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_sample, opt_lg_prof_sample, size_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_accum, opt_prof_accum, bool)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_interval, opt_lg_prof_interval, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_gdump, opt_prof_gdump, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_final, opt_prof_final, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_leak, opt_prof_leak, bool)
1297 
1298 /******************************************************************************/
1299 
/*
 * "thread.arena" mallctl: read the calling thread's current arena index
 * and/or migrate the thread to a different arena (initializing the target
 * arena on demand and reassociating the thread's tcache if configured).
 */
static int
thread_arena_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	tsd_t *tsd;
	arena_t *oldarena;
	unsigned newind, oldind;

	tsd = tsd_fetch();
	oldarena = arena_choose(tsd, NULL);
	if (oldarena == NULL)
		return (EAGAIN);

	malloc_mutex_lock(&ctl_mtx);
	newind = oldind = oldarena->ind;
	WRITE(newind, unsigned);
	READ(oldind, unsigned);
	if (newind != oldind) {
		arena_t *newarena;

		if (newind >= ctl_stats.narenas) {
			/* New arena index is out of range. */
			ret = EFAULT;
			goto label_return;
		}

		/* Initialize arena if necessary. */
		newarena = arena_get(newind, true);
		if (newarena == NULL) {
			ret = EAGAIN;
			goto label_return;
		}
		/* Set new arena/tcache associations. */
		arena_migrate(tsd, oldind, newind);
		if (config_tcache) {
			tcache_t *tcache = tsd_tcache_get(tsd);
			if (tcache != NULL) {
				tcache_arena_reassociate(tcache, oldarena,
				    newarena);
			}
		}
	}

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
1349 
/* Per-thread allocation counters, and pointers to them (stats-gated). */
CTL_TSD_RO_NL_CGEN(config_stats, thread_allocated, tsd_thread_allocated_get,
    uint64_t)
CTL_TSD_RO_NL_CGEN(config_stats, thread_allocatedp, tsd_thread_allocatedp_get,
    uint64_t *)
CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocated, tsd_thread_deallocated_get,
    uint64_t)
CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocatedp,
    tsd_thread_deallocatedp_get, uint64_t *)
1358 
/*
 * "thread.tcache.enabled" mallctl: read and/or toggle the calling thread's
 * tcache.  The old value is captured before any write, so a combined
 * read+write returns the pre-write state.
 */
static int
thread_tcache_enabled_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_tcache)
		return (ENOENT);

	oldval = tcache_enabled_get();
	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		tcache_enabled_set(*(bool *)newp);
	}
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
1383 
/*
 * "thread.tcache.flush" mallctl: flush the calling thread's tcache.  Takes
 * no old or new value (both READONLY() and WRITEONLY() must pass).
 */
static int
thread_tcache_flush_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	if (!config_tcache)
		return (ENOENT);

	READONLY();
	WRITEONLY();

	tcache_flush();

	ret = 0;
label_return:
	return (ret);
}
1402 
/*
 * "thread.prof.name" mallctl: read or set (but not both in one call) the
 * calling thread's profiling name.  On write, prof_thread_name_set()'s
 * error code is propagated directly.
 */
static int
thread_prof_name_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	if (!config_prof)
		return (ENOENT);

	READ_XOR_WRITE();

	if (newp != NULL) {
		tsd_t *tsd;

		if (newlen != sizeof(const char *)) {
			ret = EINVAL;
			goto label_return;
		}

		tsd = tsd_fetch();

		if ((ret = prof_thread_name_set(tsd, *(const char **)newp)) !=
		    0)
			goto label_return;
	} else {
		const char *oldname = prof_thread_name_get();
		READ(oldname, const char *);
	}

	ret = 0;
label_return:
	return (ret);
}
1436 
/*
 * "thread.prof.active" mallctl: read and/or set whether profiling is
 * active for the calling thread.  The old value is sampled before the
 * write, so a combined call returns the previous state.
 */
static int
thread_prof_active_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_prof)
		return (ENOENT);

	oldval = prof_thread_active_get();
	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		if (prof_thread_active_set(*(bool *)newp)) {
			ret = EAGAIN;
			goto label_return;
		}
	}
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
1464 
1465 /******************************************************************************/
1466 
/*
 * "tcache.create" mallctl: create an explicit tcache and return its index
 * to the caller.  Read-only in the mallctl sense (no new value accepted);
 * ctl_mtx serializes creation.
 */
static int
tcache_create_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	tsd_t *tsd;
	unsigned tcache_ind;

	if (!config_tcache)
		return (ENOENT);

	tsd = tsd_fetch();

	malloc_mutex_lock(&ctl_mtx);
	READONLY();
	if (tcaches_create(tsd, &tcache_ind)) {
		ret = EFAULT;
		goto label_return;
	}
	READ(tcache_ind, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
1493 
/*
 * "tcache.flush" mallctl: flush the explicit tcache whose index is written
 * by the caller.  Write-only; the UINT_MAX sentinel detects a missing
 * index (WRITE() is a no-op when newp is NULL).
 */
static int
tcache_flush_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	tsd_t *tsd;
	unsigned tcache_ind;

	if (!config_tcache)
		return (ENOENT);

	tsd = tsd_fetch();

	WRITEONLY();
	tcache_ind = UINT_MAX;
	WRITE(tcache_ind, unsigned);
	if (tcache_ind == UINT_MAX) {
		ret = EFAULT;
		goto label_return;
	}
	tcaches_flush(tsd, tcache_ind);

	ret = 0;
label_return:
	return (ret);
}
1520 
/*
 * "tcache.destroy" mallctl: destroy the explicit tcache whose index is
 * written by the caller.  Mirrors tcache_flush_ctl(), including the
 * UINT_MAX missing-index sentinel.
 */
static int
tcache_destroy_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	tsd_t *tsd;
	unsigned tcache_ind;

	if (!config_tcache)
		return (ENOENT);

	tsd = tsd_fetch();

	WRITEONLY();
	tcache_ind = UINT_MAX;
	WRITE(tcache_ind, unsigned);
	if (tcache_ind == UINT_MAX) {
		ret = EFAULT;
		goto label_return;
	}
	tcaches_destroy(tsd, tcache_ind);

	ret = 0;
label_return:
	return (ret);
}
1547 
1548 /******************************************************************************/
1549 
/*
 * Purge dirty pages for one arena, or for all arenas when arena_ind equals
 * ctl_stats.narenas (the "all arenas" pseudo-index).  "all" selects full
 * purge vs. decay-driven purge.  ctl_mtx is held only long enough to
 * snapshot narenas and the arena pointer(s); the purges run unlocked.
 */
static void
arena_i_purge(unsigned arena_ind, bool all)
{

	malloc_mutex_lock(&ctl_mtx);
	{
		unsigned narenas = ctl_stats.narenas;

		if (arena_ind == narenas) {
			unsigned i;
			VARIABLE_ARRAY(arena_t *, tarenas, narenas);

			for (i = 0; i < narenas; i++)
				tarenas[i] = arena_get(i, false);

			/*
			 * No further need to hold ctl_mtx, since narenas and
			 * tarenas contain everything needed below.
			 */
			malloc_mutex_unlock(&ctl_mtx);

			for (i = 0; i < narenas; i++) {
				if (tarenas[i] != NULL)
					arena_purge(tarenas[i], all);
			}
		} else {
			arena_t *tarena;

			assert(arena_ind < narenas);

			tarena = arena_get(arena_ind, false);

			/* No further need to hold ctl_mtx. */
			malloc_mutex_unlock(&ctl_mtx);

			if (tarena != NULL)
				arena_purge(tarena, all);
		}
	}
}
1590 
/*
 * "arena.<i>.purge" mallctl: full purge of arena mib[1] (all == true).
 * Takes no old or new value.
 */
static int
arena_i_purge_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;

	READONLY();
	WRITEONLY();
	arena_i_purge((unsigned)mib[1], true);

	ret = 0;
label_return:
	return (ret);
}
1605 
/*
 * "arena.<i>.decay" mallctl: decay-based (partial) purge of arena mib[1]
 * (all == false).  Takes no old or new value.
 */
static int
arena_i_decay_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;

	READONLY();
	WRITEONLY();
	arena_i_purge((unsigned)mib[1], false);

	ret = 0;
label_return:
	return (ret);
}
1620 
/*
 * "arena.<i>.dss" mallctl: read and/or set the dss (sbrk) precedence for
 * arena mib[1], by name (validated against dss_prec_names).  When mib[1]
 * is the "all arenas" pseudo-index (>= narenas), the chunk-level default
 * precedence is read/set instead.  Returns the pre-write setting.
 */
static int
arena_i_dss_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	const char *dss = NULL;
	unsigned arena_ind = (unsigned)mib[1];
	dss_prec_t dss_prec_old = dss_prec_limit;
	dss_prec_t dss_prec = dss_prec_limit;

	malloc_mutex_lock(&ctl_mtx);
	WRITE(dss, const char *);
	if (dss != NULL) {
		int i;
		bool match = false;

		/* Map the precedence name to its enum value. */
		for (i = 0; i < dss_prec_limit; i++) {
			if (strcmp(dss_prec_names[i], dss) == 0) {
				dss_prec = i;
				match = true;
				break;
			}
		}

		if (!match) {
			ret = EINVAL;
			goto label_return;
		}
	}

	if (arena_ind < ctl_stats.narenas) {
		arena_t *arena = arena_get(arena_ind, false);
		if (arena == NULL || (dss_prec != dss_prec_limit &&
		    arena_dss_prec_set(arena, dss_prec))) {
			ret = EFAULT;
			goto label_return;
		}
		dss_prec_old = arena_dss_prec_get(arena);
	} else {
		/* Pseudo-index: operate on the global chunk default. */
		if (dss_prec != dss_prec_limit &&
		    chunk_dss_prec_set(dss_prec)) {
			ret = EFAULT;
			goto label_return;
		}
		dss_prec_old = chunk_dss_prec_get();
	}

	dss = dss_prec_names[dss_prec_old];
	READ(dss, const char *);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
1676 
1677 static int
arena_i_lg_dirty_mult_ctl(const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen)1678 arena_i_lg_dirty_mult_ctl(const size_t *mib, size_t miblen, void *oldp,
1679     size_t *oldlenp, void *newp, size_t newlen)
1680 {
1681 	int ret;
1682 	unsigned arena_ind = (unsigned)mib[1];
1683 	arena_t *arena;
1684 
1685 	arena = arena_get(arena_ind, false);
1686 	if (arena == NULL) {
1687 		ret = EFAULT;
1688 		goto label_return;
1689 	}
1690 
1691 	if (oldp != NULL && oldlenp != NULL) {
1692 		size_t oldval = arena_lg_dirty_mult_get(arena);
1693 		READ(oldval, ssize_t);
1694 	}
1695 	if (newp != NULL) {
1696 		if (newlen != sizeof(ssize_t)) {
1697 			ret = EINVAL;
1698 			goto label_return;
1699 		}
1700 		if (arena_lg_dirty_mult_set(arena, *(ssize_t *)newp)) {
1701 			ret = EFAULT;
1702 			goto label_return;
1703 		}
1704 	}
1705 
1706 	ret = 0;
1707 label_return:
1708 	return (ret);
1709 }
1710 
1711 static int
arena_i_decay_time_ctl(const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen)1712 arena_i_decay_time_ctl(const size_t *mib, size_t miblen, void *oldp,
1713     size_t *oldlenp, void *newp, size_t newlen)
1714 {
1715 	int ret;
1716 	unsigned arena_ind = (unsigned)mib[1];
1717 	arena_t *arena;
1718 
1719 	arena = arena_get(arena_ind, false);
1720 	if (arena == NULL) {
1721 		ret = EFAULT;
1722 		goto label_return;
1723 	}
1724 
1725 	if (oldp != NULL && oldlenp != NULL) {
1726 		size_t oldval = arena_decay_time_get(arena);
1727 		READ(oldval, ssize_t);
1728 	}
1729 	if (newp != NULL) {
1730 		if (newlen != sizeof(ssize_t)) {
1731 			ret = EINVAL;
1732 			goto label_return;
1733 		}
1734 		if (arena_decay_time_set(arena, *(ssize_t *)newp)) {
1735 			ret = EFAULT;
1736 			goto label_return;
1737 		}
1738 	}
1739 
1740 	ret = 0;
1741 label_return:
1742 	return (ret);
1743 }
1744 
/*
 * "arena.<i>.chunk_hooks" mallctl: read and/or replace arena mib[1]'s
 * chunk management hooks.  On a combined write+read, the returned value is
 * the hooks that were in place before the write.
 */
static int
arena_i_chunk_hooks_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned arena_ind = (unsigned)mib[1];
	arena_t *arena;

	malloc_mutex_lock(&ctl_mtx);
	if (arena_ind < narenas_total_get() && (arena =
	    arena_get(arena_ind, false)) != NULL) {
		if (newp != NULL) {
			chunk_hooks_t old_chunk_hooks, new_chunk_hooks;
			WRITE(new_chunk_hooks, chunk_hooks_t);
			old_chunk_hooks = chunk_hooks_set(arena,
			    &new_chunk_hooks);
			READ(old_chunk_hooks, chunk_hooks_t);
		} else {
			chunk_hooks_t old_chunk_hooks = chunk_hooks_get(arena);
			READ(old_chunk_hooks, chunk_hooks_t);
		}
	} else {
		ret = EFAULT;
		goto label_return;
	}
	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
1775 
/*
 * Index callback for "arena.<i>".  i == ctl_stats.narenas is deliberately
 * accepted (hence ">" rather than ">="): it is the "all arenas"
 * pseudo-index handled by e.g. arena_i_purge().
 */
static const ctl_named_node_t *
arena_i_index(const size_t *mib, size_t miblen, size_t i)
{
	const ctl_named_node_t * ret;

	malloc_mutex_lock(&ctl_mtx);
	if (i > ctl_stats.narenas) {
		ret = NULL;
		goto label_return;
	}

	ret = super_arena_i_node;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
1792 
1793 /******************************************************************************/
1794 
/*
 * "arenas.narenas" mallctl: read the current number of arenas.  Read-only.
 * NOTE(review): *oldlenp is dereferenced after READONLY() without a NULL
 * check, so a NULL oldlenp would fault here — presumably callers always
 * supply it; confirm against the mallctl entry points.
 */
static int
arenas_narenas_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned narenas;

	malloc_mutex_lock(&ctl_mtx);
	READONLY();
	if (*oldlenp != sizeof(unsigned)) {
		ret = EINVAL;
		goto label_return;
	}
	narenas = ctl_stats.narenas;
	READ(narenas, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
1816 
/*
 * "arenas.initialized" mallctl: fill oldp with one bool per arena.  If the
 * caller's buffer length mismatches, as many entries as fit are still
 * copied out and EINVAL is returned.
 * NOTE(review): oldp/oldlenp are dereferenced without NULL checks —
 * presumably guaranteed non-NULL by the caller; verify.
 */
static int
arenas_initialized_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned nread, i;

	malloc_mutex_lock(&ctl_mtx);
	READONLY();
	if (*oldlenp != ctl_stats.narenas * sizeof(bool)) {
		ret = EINVAL;
		nread = (*oldlenp < ctl_stats.narenas * sizeof(bool))
		    ? (unsigned)(*oldlenp / sizeof(bool)) : ctl_stats.narenas;
	} else {
		ret = 0;
		nread = ctl_stats.narenas;
	}

	for (i = 0; i < nread; i++)
		((bool *)oldp)[i] = ctl_stats.arenas[i].initialized;

label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
1842 
1843 static int
arenas_lg_dirty_mult_ctl(const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen)1844 arenas_lg_dirty_mult_ctl(const size_t *mib, size_t miblen, void *oldp,
1845     size_t *oldlenp, void *newp, size_t newlen)
1846 {
1847 	int ret;
1848 
1849 	if (oldp != NULL && oldlenp != NULL) {
1850 		size_t oldval = arena_lg_dirty_mult_default_get();
1851 		READ(oldval, ssize_t);
1852 	}
1853 	if (newp != NULL) {
1854 		if (newlen != sizeof(ssize_t)) {
1855 			ret = EINVAL;
1856 			goto label_return;
1857 		}
1858 		if (arena_lg_dirty_mult_default_set(*(ssize_t *)newp)) {
1859 			ret = EFAULT;
1860 			goto label_return;
1861 		}
1862 	}
1863 
1864 	ret = 0;
1865 label_return:
1866 	return (ret);
1867 }
1868 
1869 static int
arenas_decay_time_ctl(const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen)1870 arenas_decay_time_ctl(const size_t *mib, size_t miblen, void *oldp,
1871     size_t *oldlenp, void *newp, size_t newlen)
1872 {
1873 	int ret;
1874 
1875 	if (oldp != NULL && oldlenp != NULL) {
1876 		size_t oldval = arena_decay_time_default_get();
1877 		READ(oldval, ssize_t);
1878 	}
1879 	if (newp != NULL) {
1880 		if (newlen != sizeof(ssize_t)) {
1881 			ret = EINVAL;
1882 			goto label_return;
1883 		}
1884 		if (arena_decay_time_default_set(*(ssize_t *)newp)) {
1885 			ret = EFAULT;
1886 			goto label_return;
1887 		}
1888 	}
1889 
1890 	ret = 0;
1891 label_return:
1892 	return (ret);
1893 }
1894 
/* Read-only size-class and bin metadata; bin entries index by mib[2]. */
CTL_RO_NL_GEN(arenas_quantum, QUANTUM, size_t)
CTL_RO_NL_GEN(arenas_page, PAGE, size_t)
CTL_RO_NL_CGEN(config_tcache, arenas_tcache_max, tcache_maxclass, size_t)
CTL_RO_NL_GEN(arenas_nbins, NBINS, unsigned)
CTL_RO_NL_CGEN(config_tcache, arenas_nhbins, nhbins, unsigned)
CTL_RO_NL_GEN(arenas_bin_i_size, arena_bin_info[mib[2]].reg_size, size_t)
CTL_RO_NL_GEN(arenas_bin_i_nregs, arena_bin_info[mib[2]].nregs, uint32_t)
CTL_RO_NL_GEN(arenas_bin_i_run_size, arena_bin_info[mib[2]].run_size, size_t)
/*
 * Index callback for "arenas.bin.<i>".
 * NOTE(review): the bound uses ">" so i == NBINS is accepted, yet the bin
 * handlers index arena_bin_info[mib[2]] with valid indices 0..NBINS-1 —
 * looks like an off-by-one; confirm intended semantics before changing.
 */
static const ctl_named_node_t *
arenas_bin_i_index(const size_t *mib, size_t miblen, size_t i)
{

	if (i > NBINS)
		return (NULL);
	return (super_arenas_bin_i_node);
}
1911 
/* Large run class count and per-class size (offset past the bin classes). */
CTL_RO_NL_GEN(arenas_nlruns, nlclasses, unsigned)
CTL_RO_NL_GEN(arenas_lrun_i_size, index2size(NBINS+(szind_t)mib[2]), size_t)
/*
 * Index callback for "arenas.lrun.<i>".
 * NOTE(review): bound uses ">" (accepts i == nlclasses) — same apparent
 * off-by-one as arenas_bin_i_index; confirm before changing.
 */
static const ctl_named_node_t *
arenas_lrun_i_index(const size_t *mib, size_t miblen, size_t i)
{

	if (i > nlclasses)
		return (NULL);
	return (super_arenas_lrun_i_node);
}
1922 
/* Huge chunk class count and per-class size (offset past bin+run classes). */
CTL_RO_NL_GEN(arenas_nhchunks, nhclasses, unsigned)
CTL_RO_NL_GEN(arenas_hchunk_i_size, index2size(NBINS+nlclasses+(szind_t)mib[2]),
    size_t)
/*
 * Index callback for "arenas.hchunk.<i>".
 * NOTE(review): bound uses ">" (accepts i == nhclasses) — same apparent
 * off-by-one as the bin/lrun index callbacks; confirm before changing.
 */
static const ctl_named_node_t *
arenas_hchunk_i_index(const size_t *mib, size_t miblen, size_t i)
{

	if (i > nhclasses)
		return (NULL);
	return (super_arenas_hchunk_i_node);
}
1934 
/*
 * "arenas.extend" mallctl: create a new arena via ctl_grow() and return
 * its index (narenas - 1 after the grow).  Read-only; serialized by
 * ctl_mtx.  Returns EAGAIN if arena creation fails.
 */
static int
arenas_extend_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	unsigned narenas;

	malloc_mutex_lock(&ctl_mtx);
	READONLY();
	if (ctl_grow()) {
		ret = EAGAIN;
		goto label_return;
	}
	narenas = ctl_stats.narenas - 1;
	READ(narenas, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(&ctl_mtx);
	return (ret);
}
1956 
1957 /******************************************************************************/
1958 
/*
 * "prof.thread_active_init" mallctl: read and/or set the initial profiling
 * activation state for newly created threads.  The setter returns the
 * previous value, so a combined call reports the pre-write state.
 */
static int
prof_thread_active_init_ctl(const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_prof)
		return (ENOENT);

	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		oldval = prof_thread_active_init_set(*(bool *)newp);
	} else
		oldval = prof_thread_active_init_get();
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
1983 
/*
 * "prof.active" mallctl: read and/or set global profiling activation.
 * The setter returns the previous value, so a combined call reports the
 * pre-write state.
 */
static int
prof_active_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_prof)
		return (ENOENT);

	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		oldval = prof_active_set(*(bool *)newp);
	} else
		oldval = prof_active_get();
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
2008 
2009 static int
prof_dump_ctl(const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen)2010 prof_dump_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
2011     void *newp, size_t newlen)
2012 {
2013 	int ret;
2014 	const char *filename = NULL;
2015 
2016 	if (!config_prof)
2017 		return (ENOENT);
2018 
2019 	WRITEONLY();
2020 	WRITE(filename, const char *);
2021 
2022 	if (prof_mdump(filename)) {
2023 		ret = EFAULT;
2024 		goto label_return;
2025 	}
2026 
2027 	ret = 0;
2028 label_return:
2029 	return (ret);
2030 }
2031 
2032 static int
prof_gdump_ctl(const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen)2033 prof_gdump_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
2034     void *newp, size_t newlen)
2035 {
2036 	int ret;
2037 	bool oldval;
2038 
2039 	if (!config_prof)
2040 		return (ENOENT);
2041 
2042 	if (newp != NULL) {
2043 		if (newlen != sizeof(bool)) {
2044 			ret = EINVAL;
2045 			goto label_return;
2046 		}
2047 		oldval = prof_gdump_set(*(bool *)newp);
2048 	} else
2049 		oldval = prof_gdump_get();
2050 	READ(oldval, bool);
2051 
2052 	ret = 0;
2053 label_return:
2054 	return (ret);
2055 }
2056 
2057 static int
prof_reset_ctl(const size_t * mib,size_t miblen,void * oldp,size_t * oldlenp,void * newp,size_t newlen)2058 prof_reset_ctl(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
2059     void *newp, size_t newlen)
2060 {
2061 	int ret;
2062 	size_t lg_sample = lg_prof_sample;
2063 	tsd_t *tsd;
2064 
2065 	if (!config_prof)
2066 		return (ENOENT);
2067 
2068 	WRITEONLY();
2069 	WRITE(lg_sample, size_t);
2070 	if (lg_sample >= (sizeof(uint64_t) << 3))
2071 		lg_sample = (sizeof(uint64_t) << 3) - 1;
2072 
2073 	tsd = tsd_fetch();
2074 
2075 	prof_reset(tsd, lg_sample);
2076 
2077 	ret = 0;
2078 label_return:
2079 	return (ret);
2080 }
2081 
/* Read-only, lock-free profiling parameters; compiled out if !config_prof. */
CTL_RO_NL_CGEN(config_prof, prof_interval, prof_interval, uint64_t)
CTL_RO_NL_CGEN(config_prof, lg_prof_sample, lg_prof_sample, size_t)
2084 
2085 /******************************************************************************/
2086 
/* Global statistics, snapshotted into ctl_stats at the last epoch refresh. */
CTL_RO_CGEN(config_stats, stats_cactive, &stats_cactive, size_t *)
CTL_RO_CGEN(config_stats, stats_allocated, ctl_stats.allocated, size_t)
CTL_RO_CGEN(config_stats, stats_active, ctl_stats.active, size_t)
CTL_RO_CGEN(config_stats, stats_metadata, ctl_stats.metadata, size_t)
CTL_RO_CGEN(config_stats, stats_resident, ctl_stats.resident, size_t)
CTL_RO_CGEN(config_stats, stats_mapped, ctl_stats.mapped, size_t)

/* Per-arena statistics; mib[2] is the arena index validated by the
 * stats_arenas_i_index() handler. */
CTL_RO_GEN(stats_arenas_i_dss, ctl_stats.arenas[mib[2]].dss, const char *)
CTL_RO_GEN(stats_arenas_i_lg_dirty_mult, ctl_stats.arenas[mib[2]].lg_dirty_mult,
    ssize_t)
CTL_RO_GEN(stats_arenas_i_decay_time, ctl_stats.arenas[mib[2]].decay_time,
    ssize_t)
CTL_RO_GEN(stats_arenas_i_nthreads, ctl_stats.arenas[mib[2]].nthreads, unsigned)
CTL_RO_GEN(stats_arenas_i_pactive, ctl_stats.arenas[mib[2]].pactive, size_t)
CTL_RO_GEN(stats_arenas_i_pdirty, ctl_stats.arenas[mib[2]].pdirty, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_mapped,
    ctl_stats.arenas[mib[2]].astats.mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_npurge,
    ctl_stats.arenas[mib[2]].astats.npurge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_nmadvise,
    ctl_stats.arenas[mib[2]].astats.nmadvise, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_purged,
    ctl_stats.arenas[mib[2]].astats.purged, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_mapped,
    ctl_stats.arenas[mib[2]].astats.metadata_mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_allocated,
    ctl_stats.arenas[mib[2]].astats.metadata_allocated, size_t)

/* Per-arena allocation-class summaries (small/large/huge). */
CTL_RO_CGEN(config_stats, stats_arenas_i_small_allocated,
    ctl_stats.arenas[mib[2]].allocated_small, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nmalloc,
    ctl_stats.arenas[mib[2]].nmalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_ndalloc,
    ctl_stats.arenas[mib[2]].ndalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nrequests,
    ctl_stats.arenas[mib[2]].nrequests_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_allocated,
    ctl_stats.arenas[mib[2]].astats.allocated_large, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nmalloc,
    ctl_stats.arenas[mib[2]].astats.nmalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_ndalloc,
    ctl_stats.arenas[mib[2]].astats.ndalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nrequests,
    ctl_stats.arenas[mib[2]].astats.nrequests_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_allocated,
    ctl_stats.arenas[mib[2]].astats.allocated_huge, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_nmalloc,
    ctl_stats.arenas[mib[2]].astats.nmalloc_huge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_ndalloc,
    ctl_stats.arenas[mib[2]].astats.ndalloc_huge, uint64_t)
/*
 * Huge allocations are never issued in bulk, so every huge request maps to
 * exactly one huge malloc; nrequests deliberately aliases nmalloc_huge.
 */
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_nrequests,
    ctl_stats.arenas[mib[2]].astats.nmalloc_huge, uint64_t) /* Intentional. */

/* Per-bin statistics; mib[4] is the bin index from the _j_index handler. */
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nmalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_ndalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nrequests,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curregs,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].curregs, size_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nfills,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nfills, uint64_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nflushes,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nflushes, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nreruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].reruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].curruns, size_t)
2158 
2159 static const ctl_named_node_t *
2160 stats_arenas_i_bins_j_index(const size_t *mib, size_t miblen, size_t j)
2161 {
2162 
2163 	if (j > NBINS)
2164 		return (NULL);
2165 	return (super_stats_arenas_i_bins_j_node);
2166 }
2167 
/* Per-large-run-class statistics; mib[4] is the class index from the
 * _j_index handler. */
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nmalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_ndalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nrequests,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_curruns,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].curruns, size_t)
2176 
2177 static const ctl_named_node_t *
stats_arenas_i_lruns_j_index(const size_t * mib,size_t miblen,size_t j)2178 stats_arenas_i_lruns_j_index(const size_t *mib, size_t miblen, size_t j)
2179 {
2180 
2181 	if (j > nlclasses)
2182 		return (NULL);
2183 	return (super_stats_arenas_i_lruns_j_node);
2184 }
2185 
/* Per-huge-chunk-class statistics; mib[4] is the class index from the
 * _j_index handler. */
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_nmalloc,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_ndalloc,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].ndalloc, uint64_t)
/* Huge requests map 1:1 to huge mallocs; nrequests aliases nmalloc. */
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_nrequests,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].nmalloc, /* Intentional. */
    uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_curhchunks,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].curhchunks, size_t)
2195 
2196 static const ctl_named_node_t *
stats_arenas_i_hchunks_j_index(const size_t * mib,size_t miblen,size_t j)2197 stats_arenas_i_hchunks_j_index(const size_t *mib, size_t miblen, size_t j)
2198 {
2199 
2200 	if (j > nhclasses)
2201 		return (NULL);
2202 	return (super_stats_arenas_i_hchunks_j_node);
2203 }
2204 
2205 static const ctl_named_node_t *
stats_arenas_i_index(const size_t * mib,size_t miblen,size_t i)2206 stats_arenas_i_index(const size_t *mib, size_t miblen, size_t i)
2207 {
2208 	const ctl_named_node_t * ret;
2209 
2210 	malloc_mutex_lock(&ctl_mtx);
2211 	if (i > ctl_stats.narenas || !ctl_stats.arenas[i].initialized) {
2212 		ret = NULL;
2213 		goto label_return;
2214 	}
2215 
2216 	ret = super_stats_arenas_i_node;
2217 label_return:
2218 	malloc_mutex_unlock(&ctl_mtx);
2219 	return (ret);
2220 }
2221