xref: /netbsd-src/external/bsd/jemalloc/dist/test/unit/mallctl.c (revision 32d1c65c71fbdb65a012e8392a62a757dd6853e9)
1 #include "test/jemalloc_test.h"
2 
3 #include "jemalloc/internal/ctl.h"
4 #include "jemalloc/internal/hook.h"
5 #include "jemalloc/internal/util.h"
6 
/*
 * Verify mallctl() error reporting: unknown names (ENOENT), writes to
 * read-only nodes (EPERM), and mismatched input/output sizes (EINVAL).
 */
TEST_BEGIN(test_mallctl_errors) {
	uint64_t epoch;
	size_t sz;

	expect_d_eq(mallctl("no_such_name", NULL, NULL, NULL, 0), ENOENT,
	    "mallctl() should return ENOENT for non-existent names");

	/* "version" is a read-only node; any write attempt must fail. */
	expect_d_eq(mallctl("version", NULL, NULL, "0.0.0", strlen("0.0.0")),
	    EPERM, "mallctl() should return EPERM on attempt to write "
	    "read-only value");

	/* "epoch" takes a uint64_t; both smaller and larger newlen fail. */
	expect_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch,
	    sizeof(epoch)-1), EINVAL,
	    "mallctl() should return EINVAL for input size mismatch");
	expect_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch,
	    sizeof(epoch)+1), EINVAL,
	    "mallctl() should return EINVAL for input size mismatch");

	/* Same checks for the output size (*oldlenp). */
	sz = sizeof(epoch)-1;
	expect_d_eq(mallctl("epoch", (void *)&epoch, &sz, NULL, 0), EINVAL,
	    "mallctl() should return EINVAL for output size mismatch");
	sz = sizeof(epoch)+1;
	expect_d_eq(mallctl("epoch", (void *)&epoch, &sz, NULL, 0), EINVAL,
	    "mallctl() should return EINVAL for output size mismatch");
}
TEST_END
33 
34 TEST_BEGIN(test_mallctlnametomib_errors) {
35 	size_t mib[1];
36 	size_t miblen;
37 
38 	miblen = sizeof(mib)/sizeof(size_t);
39 	expect_d_eq(mallctlnametomib("no_such_name", mib, &miblen), ENOENT,
40 	    "mallctlnametomib() should return ENOENT for non-existent names");
41 }
42 TEST_END
43 
44 TEST_BEGIN(test_mallctlbymib_errors) {
45 	uint64_t epoch;
46 	size_t sz;
47 	size_t mib[1];
48 	size_t miblen;
49 
50 	miblen = sizeof(mib)/sizeof(size_t);
51 	expect_d_eq(mallctlnametomib("version", mib, &miblen), 0,
52 	    "Unexpected mallctlnametomib() failure");
53 
54 	expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, "0.0.0",
55 	    strlen("0.0.0")), EPERM, "mallctl() should return EPERM on "
56 	    "attempt to write read-only value");
57 
58 	miblen = sizeof(mib)/sizeof(size_t);
59 	expect_d_eq(mallctlnametomib("epoch", mib, &miblen), 0,
60 	    "Unexpected mallctlnametomib() failure");
61 
62 	expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, (void *)&epoch,
63 	    sizeof(epoch)-1), EINVAL,
64 	    "mallctlbymib() should return EINVAL for input size mismatch");
65 	expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, (void *)&epoch,
66 	    sizeof(epoch)+1), EINVAL,
67 	    "mallctlbymib() should return EINVAL for input size mismatch");
68 
69 	sz = sizeof(epoch)-1;
70 	expect_d_eq(mallctlbymib(mib, miblen, (void *)&epoch, &sz, NULL, 0),
71 	    EINVAL,
72 	    "mallctlbymib() should return EINVAL for output size mismatch");
73 	sz = sizeof(epoch)+1;
74 	expect_d_eq(mallctlbymib(mib, miblen, (void *)&epoch, &sz, NULL, 0),
75 	    EINVAL,
76 	    "mallctlbymib() should return EINVAL for output size mismatch");
77 }
78 TEST_END
79 
/*
 * Exercise all four mallctl() access modes on "epoch": blind (no old,
 * no new), read-only, write-only, and read+write.
 */
TEST_BEGIN(test_mallctl_read_write) {
	uint64_t old_epoch, new_epoch;
	size_t sz = sizeof(old_epoch);

	/* Blind. */
	expect_d_eq(mallctl("epoch", NULL, NULL, NULL, 0), 0,
	    "Unexpected mallctl() failure");
	/* sz is not passed above, so it must remain untouched. */
	expect_zu_eq(sz, sizeof(old_epoch), "Unexpected output size");

	/* Read. */
	expect_d_eq(mallctl("epoch", (void *)&old_epoch, &sz, NULL, 0), 0,
	    "Unexpected mallctl() failure");
	expect_zu_eq(sz, sizeof(old_epoch), "Unexpected output size");

	/* Write. */
	expect_d_eq(mallctl("epoch", NULL, NULL, (void *)&new_epoch,
	    sizeof(new_epoch)), 0, "Unexpected mallctl() failure");
	expect_zu_eq(sz, sizeof(old_epoch), "Unexpected output size");

	/* Read+write. */
	expect_d_eq(mallctl("epoch", (void *)&old_epoch, &sz,
	    (void *)&new_epoch, sizeof(new_epoch)), 0,
	    "Unexpected mallctl() failure");
	expect_zu_eq(sz, sizeof(old_epoch), "Unexpected output size");
}
TEST_END
106 
/*
 * A mib array shorter than the full name must be filled only up to
 * miblen entries; mib[3] is a sentinel that must survive the call.
 */
TEST_BEGIN(test_mallctlnametomib_short_mib) {
	size_t mib[4];
	size_t miblen;

	miblen = 3;
	mib[3] = 42;
	expect_d_eq(mallctlnametomib("arenas.bin.0.nregs", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() failure");
	expect_zu_eq(miblen, 3, "Unexpected mib output length");
	expect_zu_eq(mib[3], 42,
	    "mallctlnametomib() wrote past the end of the input mib");
}
TEST_END
120 
/*
 * Converse of the short-mib case: a name with fewer components than
 * miblen must shrink miblen and leave trailing entries untouched.
 */
TEST_BEGIN(test_mallctlnametomib_short_name) {
	size_t mib[4];
	size_t miblen;

	miblen = 4;
	mib[3] = 42;
	expect_d_eq(mallctlnametomib("arenas.bin.0", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() failure");
	expect_zu_eq(miblen, 3, "Unexpected mib output length");
	expect_zu_eq(mib[3], 42,
	    "mallctlnametomib() wrote past the end of the input mib");
}
TEST_END
134 
/*
 * Exercise the internal ctl_mibnametomib(), which extends an existing
 * mib prefix (of the given length) by translating additional name
 * components, building up "arenas.bin.0.nregs" one component at a
 * time.  On ENOENT, *miblen must be left unchanged.
 */
TEST_BEGIN(test_mallctlmibnametomib) {
	size_t mib[4];
	size_t miblen = 4;
	uint32_t result, result_ref;
	size_t len_result = sizeof(uint32_t);

	tsd_t *tsd = tsd_fetch();

	/* Error cases */
	assert_d_eq(ctl_mibnametomib(tsd, mib, 0, "bob", &miblen), ENOENT, "");
	assert_zu_eq(miblen, 4, "");
	assert_d_eq(ctl_mibnametomib(tsd, mib, 0, "9999", &miblen), ENOENT, "");
	assert_zu_eq(miblen, 4, "");

	/* Valid case. */
	assert_d_eq(ctl_mibnametomib(tsd, mib, 0, "arenas", &miblen), 0, "");
	assert_zu_eq(miblen, 1, "");
	miblen = 4;
	assert_d_eq(ctl_mibnametomib(tsd, mib, 1, "bin", &miblen), 0, "");
	assert_zu_eq(miblen, 2, "");
	/* "arenas.bin" is not a leaf, so lookups through it must fail. */
	expect_d_eq(mallctlbymib(mib, miblen, &result, &len_result, NULL, 0),
	    ENOENT, "mallctlbymib() should fail on partial path");

	/* Error cases. */
	miblen = 4;
	assert_d_eq(ctl_mibnametomib(tsd, mib, 2, "bob", &miblen), ENOENT, "");
	assert_zu_eq(miblen, 4, "");
	assert_d_eq(ctl_mibnametomib(tsd, mib, 2, "9999", &miblen), ENOENT, "");
	assert_zu_eq(miblen, 4, "");

	/* Valid case. */
	assert_d_eq(ctl_mibnametomib(tsd, mib, 2, "0", &miblen), 0, "");
	assert_zu_eq(miblen, 3, "");
	expect_d_eq(mallctlbymib(mib, miblen, &result, &len_result, NULL, 0),
	    ENOENT, "mallctlbymib() should fail on partial path");

	/* Error cases. */
	miblen = 4;
	assert_d_eq(ctl_mibnametomib(tsd, mib, 3, "bob", &miblen), ENOENT, "");
	assert_zu_eq(miblen, 4, "");
	assert_d_eq(ctl_mibnametomib(tsd, mib, 3, "9999", &miblen), ENOENT, "");
	assert_zu_eq(miblen, 4, "");

	/* Valid case. */
	assert_d_eq(ctl_mibnametomib(tsd, mib, 3, "nregs", &miblen), 0, "");
	assert_zu_eq(miblen, 4, "");
	/* The complete mib must agree with a plain mallctl() lookup. */
	assert_d_eq(mallctlbymib(mib, miblen, &result, &len_result, NULL, 0),
	    0, "Unexpected mallctlbymib() failure");
	assert_d_eq(mallctl("arenas.bin.0.nregs", &result_ref, &len_result,
	    NULL, 0), 0, "Unexpected mallctl() failure");
	expect_zu_eq(result, result_ref,
	    "mallctlbymib() and mallctl() returned different result");
}
TEST_END
189 
/*
 * Exercise the internal ctl_bymibname(), which resolves a name suffix
 * relative to an existing mib prefix and performs the lookup in one
 * call.  All prefix lengths 0..3 of "arenas.bin.0.nregs" must yield
 * the same result as a plain mallctl().
 */
TEST_BEGIN(test_mallctlbymibname) {
	size_t mib[4];
	size_t miblen = 4;
	uint32_t result, result_ref;
	size_t len_result = sizeof(uint32_t);

	tsd_t *tsd = tsd_fetch();

	/* Error cases. */

	assert_d_eq(mallctlnametomib("arenas", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() failure");
	assert_zu_eq(miblen, 1, "");

	/* "arenas.bin.0" is a partial path; "arenas.bin.0.bob" is unknown. */
	miblen = 4;
	assert_d_eq(ctl_bymibname(tsd, mib, 1, "bin.0", &miblen,
	    &result, &len_result, NULL, 0), ENOENT, "");
	miblen = 4;
	assert_d_eq(ctl_bymibname(tsd, mib, 1, "bin.0.bob", &miblen,
	    &result, &len_result, NULL, 0), ENOENT, "");
	assert_zu_eq(miblen, 4, "");

	/* Valid cases. */

	assert_d_eq(mallctl("arenas.bin.0.nregs", &result_ref, &len_result,
	    NULL, 0), 0, "Unexpected mallctl() failure");
	miblen = 4;

	assert_d_eq(ctl_bymibname(tsd, mib, 0, "arenas.bin.0.nregs", &miblen,
	    &result, &len_result, NULL, 0), 0, "");
	assert_zu_eq(miblen, 4, "");
	expect_zu_eq(result, result_ref, "Unexpected result");

	assert_d_eq(ctl_bymibname(tsd, mib, 1, "bin.0.nregs", &miblen, &result,
	    &len_result, NULL, 0), 0, "");
	assert_zu_eq(miblen, 4, "");
	expect_zu_eq(result, result_ref, "Unexpected result");

	assert_d_eq(ctl_bymibname(tsd, mib, 2, "0.nregs", &miblen, &result,
	    &len_result, NULL, 0), 0, "");
	assert_zu_eq(miblen, 4, "");
	expect_zu_eq(result, result_ref, "Unexpected result");

	assert_d_eq(ctl_bymibname(tsd, mib, 3, "nregs", &miblen, &result,
	    &len_result, NULL, 0), 0, "");
	assert_zu_eq(miblen, 4, "");
	expect_zu_eq(result, result_ref, "Unexpected result");
}
TEST_END
239 
/*
 * Verify that each "config.*" mallctl node reports the corresponding
 * compile-time config_* constant.
 */
TEST_BEGIN(test_mallctl_config) {
/* Read config.<config> as type t and compare against config_<config>. */
#define TEST_MALLCTL_CONFIG(config, t) do {				\
	t oldval;							\
	size_t sz = sizeof(oldval);					\
	expect_d_eq(mallctl("config."#config, (void *)&oldval, &sz,	\
	    NULL, 0), 0, "Unexpected mallctl() failure");		\
	expect_b_eq(oldval, config_##config, "Incorrect config value");	\
	expect_zu_eq(sz, sizeof(oldval), "Unexpected output size");	\
} while (0)

	TEST_MALLCTL_CONFIG(cache_oblivious, bool);
	TEST_MALLCTL_CONFIG(debug, bool);
	TEST_MALLCTL_CONFIG(fill, bool);
	TEST_MALLCTL_CONFIG(lazy_lock, bool);
	TEST_MALLCTL_CONFIG(malloc_conf, const char *);
	TEST_MALLCTL_CONFIG(prof, bool);
	TEST_MALLCTL_CONFIG(prof_libgcc, bool);
	TEST_MALLCTL_CONFIG(prof_libunwind, bool);
	TEST_MALLCTL_CONFIG(stats, bool);
	TEST_MALLCTL_CONFIG(utrace, bool);
	TEST_MALLCTL_CONFIG(xmalloc, bool);

#undef TEST_MALLCTL_CONFIG
}
TEST_END
265 
/*
 * Verify that each "opt.*" node is readable when its feature is
 * compiled in, and reports ENOENT otherwise.  The third macro argument
 * names the config_* gate ("always" is a local alias that is always
 * true).
 */
TEST_BEGIN(test_mallctl_opt) {
	bool config_always = true;

#define TEST_MALLCTL_OPT(t, opt, config) do {				\
	t oldval;							\
	size_t sz = sizeof(oldval);					\
	int expected = config_##config ? 0 : ENOENT;			\
	int result = mallctl("opt."#opt, (void *)&oldval, &sz, NULL,	\
	    0);								\
	expect_d_eq(result, expected,					\
	    "Unexpected mallctl() result for opt."#opt);		\
	expect_zu_eq(sz, sizeof(oldval), "Unexpected output size");	\
} while (0)

	TEST_MALLCTL_OPT(bool, abort, always);
	TEST_MALLCTL_OPT(bool, abort_conf, always);
	TEST_MALLCTL_OPT(bool, cache_oblivious, always);
	TEST_MALLCTL_OPT(bool, trust_madvise, always);
	TEST_MALLCTL_OPT(bool, confirm_conf, always);
	TEST_MALLCTL_OPT(const char *, metadata_thp, always);
	TEST_MALLCTL_OPT(bool, retain, always);
	TEST_MALLCTL_OPT(const char *, dss, always);
	TEST_MALLCTL_OPT(bool, hpa, always);
	TEST_MALLCTL_OPT(size_t, hpa_slab_max_alloc, always);
	TEST_MALLCTL_OPT(size_t, hpa_sec_nshards, always);
	TEST_MALLCTL_OPT(size_t, hpa_sec_max_alloc, always);
	TEST_MALLCTL_OPT(size_t, hpa_sec_max_bytes, always);
	TEST_MALLCTL_OPT(size_t, hpa_sec_bytes_after_flush, always);
	TEST_MALLCTL_OPT(size_t, hpa_sec_batch_fill_extra, always);
	TEST_MALLCTL_OPT(unsigned, narenas, always);
	TEST_MALLCTL_OPT(const char *, percpu_arena, always);
	TEST_MALLCTL_OPT(size_t, oversize_threshold, always);
	TEST_MALLCTL_OPT(bool, background_thread, always);
	TEST_MALLCTL_OPT(ssize_t, dirty_decay_ms, always);
	TEST_MALLCTL_OPT(ssize_t, muzzy_decay_ms, always);
	TEST_MALLCTL_OPT(bool, stats_print, always);
	TEST_MALLCTL_OPT(const char *, stats_print_opts, always);
	TEST_MALLCTL_OPT(int64_t, stats_interval, always);
	TEST_MALLCTL_OPT(const char *, stats_interval_opts, always);
	TEST_MALLCTL_OPT(const char *, junk, fill);
	TEST_MALLCTL_OPT(bool, zero, fill);
	TEST_MALLCTL_OPT(bool, utrace, utrace);
	TEST_MALLCTL_OPT(bool, xmalloc, xmalloc);
	TEST_MALLCTL_OPT(bool, tcache, always);
	TEST_MALLCTL_OPT(size_t, lg_extent_max_active_fit, always);
	TEST_MALLCTL_OPT(size_t, tcache_max, always);
	TEST_MALLCTL_OPT(const char *, thp, always);
	TEST_MALLCTL_OPT(const char *, zero_realloc, always);
	TEST_MALLCTL_OPT(bool, prof, prof);
	TEST_MALLCTL_OPT(const char *, prof_prefix, prof);
	TEST_MALLCTL_OPT(bool, prof_active, prof);
	TEST_MALLCTL_OPT(ssize_t, lg_prof_sample, prof);
	TEST_MALLCTL_OPT(bool, prof_accum, prof);
	TEST_MALLCTL_OPT(ssize_t, lg_prof_interval, prof);
	TEST_MALLCTL_OPT(bool, prof_gdump, prof);
	TEST_MALLCTL_OPT(bool, prof_final, prof);
	TEST_MALLCTL_OPT(bool, prof_leak, prof);
	TEST_MALLCTL_OPT(bool, prof_leak_error, prof);
	TEST_MALLCTL_OPT(ssize_t, prof_recent_alloc_max, prof);
	TEST_MALLCTL_OPT(bool, prof_stats, prof);
	TEST_MALLCTL_OPT(bool, prof_sys_thread_name, prof);
	TEST_MALLCTL_OPT(ssize_t, lg_san_uaf_align, uaf_detection);

#undef TEST_MALLCTL_OPT
}
TEST_END
332 
333 TEST_BEGIN(test_manpage_example) {
334 	unsigned nbins, i;
335 	size_t mib[4];
336 	size_t len, miblen;
337 
338 	len = sizeof(nbins);
339 	expect_d_eq(mallctl("arenas.nbins", (void *)&nbins, &len, NULL, 0), 0,
340 	    "Unexpected mallctl() failure");
341 
342 	miblen = 4;
343 	expect_d_eq(mallctlnametomib("arenas.bin.0.size", mib, &miblen), 0,
344 	    "Unexpected mallctlnametomib() failure");
345 	for (i = 0; i < nbins; i++) {
346 		size_t bin_size;
347 
348 		mib[2] = i;
349 		len = sizeof(bin_size);
350 		expect_d_eq(mallctlbymib(mib, miblen, (void *)&bin_size, &len,
351 		    NULL, 0), 0, "Unexpected mallctlbymib() failure");
352 		/* Do something with bin_size... */
353 	}
354 }
355 TEST_END
356 
/*
 * Verify that MALLOCX_TCACHE_NONE bypasses the thread cache: a region
 * freed through the tcache should be handed back by the next cached
 * allocation, while one freed with TCACHE_NONE should not.
 */
TEST_BEGIN(test_tcache_none) {
	test_skip_if(!opt_tcache);

	/* Allocate p and q. */
	void *p0 = mallocx(42, 0);
	expect_ptr_not_null(p0, "Unexpected mallocx() failure");
	void *q = mallocx(42, 0);
	expect_ptr_not_null(q, "Unexpected mallocx() failure");

	/* Deallocate p and q, but bypass the tcache for q. */
	dallocx(p0, 0);
	dallocx(q, MALLOCX_TCACHE_NONE);

	/* Make sure that tcache-based allocation returns p, not q. */
	void *p1 = mallocx(42, 0);
	expect_ptr_not_null(p1, "Unexpected mallocx() failure");
	/* Profiling and UAF detection can intercept/poison the cache. */
	if (!opt_prof && !san_uaf_detection_enabled()) {
		expect_ptr_eq(p0, p1,
		    "Expected tcache to allocate cached region");
	}

	/* Clean up. */
	dallocx(p1, MALLOCX_TCACHE_NONE);
}
TEST_END
382 
/*
 * Exercise explicit tcaches via the "tcache.create" / "tcache.flush" /
 * "tcache.destroy" mallctls: creation, ID recycling, flushing, and
 * verification that allocations are served from the per-tcache caches.
 */
TEST_BEGIN(test_tcache) {
#define NTCACHES	10
	unsigned tis[NTCACHES];
	void *ps[NTCACHES];
	void *qs[NTCACHES];
	unsigned i;
	size_t sz, psz, qsz;

	psz = 42;
	/* qsz lands in the next size class up from psz. */
	qsz = nallocx(psz, 0) + 1;

	/* Create tcaches. */
	for (i = 0; i < NTCACHES; i++) {
		sz = sizeof(unsigned);
		expect_d_eq(mallctl("tcache.create", (void *)&tis[i], &sz, NULL,
		    0), 0, "Unexpected mallctl() failure, i=%u", i);
	}

	/* Exercise tcache ID recycling. */
	for (i = 0; i < NTCACHES; i++) {
		expect_d_eq(mallctl("tcache.destroy", NULL, NULL,
		    (void *)&tis[i], sizeof(unsigned)), 0,
		    "Unexpected mallctl() failure, i=%u", i);
	}
	for (i = 0; i < NTCACHES; i++) {
		sz = sizeof(unsigned);
		expect_d_eq(mallctl("tcache.create", (void *)&tis[i], &sz, NULL,
		    0), 0, "Unexpected mallctl() failure, i=%u", i);
	}

	/* Flush empty tcaches. */
	for (i = 0; i < NTCACHES; i++) {
		expect_d_eq(mallctl("tcache.flush", NULL, NULL, (void *)&tis[i],
		    sizeof(unsigned)), 0, "Unexpected mallctl() failure, i=%u",
		    i);
	}

	/* Cache some allocations. */
	for (i = 0; i < NTCACHES; i++) {
		ps[i] = mallocx(psz, MALLOCX_TCACHE(tis[i]));
		expect_ptr_not_null(ps[i], "Unexpected mallocx() failure, i=%u",
		    i);
		dallocx(ps[i], MALLOCX_TCACHE(tis[i]));

		qs[i] = mallocx(qsz, MALLOCX_TCACHE(tis[i]));
		expect_ptr_not_null(qs[i], "Unexpected mallocx() failure, i=%u",
		    i);
		dallocx(qs[i], MALLOCX_TCACHE(tis[i]));
	}

	/* Verify that tcaches allocate cached regions. */
	for (i = 0; i < NTCACHES; i++) {
		void *p0 = ps[i];
		ps[i] = mallocx(psz, MALLOCX_TCACHE(tis[i]));
		expect_ptr_not_null(ps[i], "Unexpected mallocx() failure, i=%u",
		    i);
		/* UAF detection may poison the cache, defeating reuse. */
		if (!san_uaf_detection_enabled()) {
			expect_ptr_eq(ps[i], p0, "Expected mallocx() to "
			    "allocate cached region, i=%u", i);
		}
	}

	/* Verify that reallocation uses cached regions. */
	for (i = 0; i < NTCACHES; i++) {
		void *q0 = qs[i];
		qs[i] = rallocx(ps[i], qsz, MALLOCX_TCACHE(tis[i]));
		expect_ptr_not_null(qs[i], "Unexpected rallocx() failure, i=%u",
		    i);
		if (!san_uaf_detection_enabled()) {
			expect_ptr_eq(qs[i], q0, "Expected rallocx() to "
			    "allocate cached region, i=%u", i);
		}
		/* Avoid undefined behavior in case of test failure. */
		if (qs[i] == NULL) {
			qs[i] = ps[i];
		}
	}
	for (i = 0; i < NTCACHES; i++) {
		dallocx(qs[i], MALLOCX_TCACHE(tis[i]));
	}

	/* Flush some non-empty tcaches. */
	for (i = 0; i < NTCACHES/2; i++) {
		expect_d_eq(mallctl("tcache.flush", NULL, NULL, (void *)&tis[i],
		    sizeof(unsigned)), 0, "Unexpected mallctl() failure, i=%u",
		    i);
	}

	/* Destroy tcaches. */
	for (i = 0; i < NTCACHES; i++) {
		expect_d_eq(mallctl("tcache.destroy", NULL, NULL,
		    (void *)&tis[i], sizeof(unsigned)), 0,
		    "Unexpected mallctl() failure, i=%u", i);
	}
}
TEST_END
479 
/*
 * Verify "thread.arena": when percpu arenas are disabled, a thread may
 * switch arenas; when enabled, switching away from the assigned arena
 * must fail with EPERM.
 */
TEST_BEGIN(test_thread_arena) {
	unsigned old_arena_ind, new_arena_ind, narenas;

	const char *opa;
	size_t sz = sizeof(opa);
	expect_d_eq(mallctl("opt.percpu_arena", (void *)&opa, &sz, NULL, 0), 0,
	    "Unexpected mallctl() failure");

	sz = sizeof(unsigned);
	expect_d_eq(mallctl("arenas.narenas", (void *)&narenas, &sz, NULL, 0),
	    0, "Unexpected mallctl() failure");
	/*
	 * NOTE(review): with a nonzero oversize_threshold an extra
	 * (presumably auto-created oversize) arena is counted — hence
	 * the decrement before comparing against opt_narenas; confirm.
	 */
	if (opt_oversize_threshold != 0) {
		narenas--;
	}
	expect_u_eq(narenas, opt_narenas, "Number of arenas incorrect");

	if (strcmp(opa, "disabled") == 0) {
		/* Switch to the last arena, then back to arena 0. */
		new_arena_ind = narenas - 1;
		expect_d_eq(mallctl("thread.arena", (void *)&old_arena_ind, &sz,
		    (void *)&new_arena_ind, sizeof(unsigned)), 0,
		    "Unexpected mallctl() failure");
		new_arena_ind = 0;
		expect_d_eq(mallctl("thread.arena", (void *)&old_arena_ind, &sz,
		    (void *)&new_arena_ind, sizeof(unsigned)), 0,
		    "Unexpected mallctl() failure");
	} else {
		expect_d_eq(mallctl("thread.arena", (void *)&old_arena_ind, &sz,
		    NULL, 0), 0, "Unexpected mallctl() failure");
		new_arena_ind = percpu_arena_ind_limit(opt_percpu_arena) - 1;
		if (old_arena_ind != new_arena_ind) {
			expect_d_eq(mallctl("thread.arena",
			    (void *)&old_arena_ind, &sz, (void *)&new_arena_ind,
			    sizeof(unsigned)), EPERM, "thread.arena ctl "
			    "should not be allowed with percpu arena");
		}
	}
}
TEST_END
518 
/*
 * Verify "arena.<i>.initialized" is readable for every arena index and
 * that the merged (MALLCTL_ARENAS_ALL) view always reports true.
 */
TEST_BEGIN(test_arena_i_initialized) {
	unsigned narenas, i;
	size_t sz;
	size_t mib[3];
	size_t miblen = sizeof(mib) / sizeof(size_t);
	bool initialized;

	sz = sizeof(narenas);
	expect_d_eq(mallctl("arenas.narenas", (void *)&narenas, &sz, NULL, 0),
	    0, "Unexpected mallctl() failure");

	expect_d_eq(mallctlnametomib("arena.0.initialized", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() failure");
	/* mib[1] is the arena-index component; iterate over all arenas. */
	for (i = 0; i < narenas; i++) {
		mib[1] = i;
		sz = sizeof(initialized);
		expect_d_eq(mallctlbymib(mib, miblen, &initialized, &sz, NULL,
		    0), 0, "Unexpected mallctl() failure");
	}

	mib[1] = MALLCTL_ARENAS_ALL;
	sz = sizeof(initialized);
	expect_d_eq(mallctlbymib(mib, miblen, &initialized, &sz, NULL, 0), 0,
	    "Unexpected mallctl() failure");
	expect_true(initialized,
	    "Merged arena statistics should always be initialized");

	/* Equivalent to the above but using mallctl() directly. */
	sz = sizeof(initialized);
	expect_d_eq(mallctl(
	    "arena." STRINGIFY(MALLCTL_ARENAS_ALL) ".initialized",
	    (void *)&initialized, &sz, NULL, 0), 0,
	    "Unexpected mallctl() failure");
	expect_true(initialized,
	    "Merged arena statistics should always be initialized");
}
TEST_END
556 
/*
 * Verify "arena.0.dirty_decay_ms": values below -1 are rejected with
 * EFAULT, valid values round-trip, and each read returns the
 * previously written value.
 */
TEST_BEGIN(test_arena_i_dirty_decay_ms) {
	ssize_t dirty_decay_ms, orig_dirty_decay_ms, prev_dirty_decay_ms;
	size_t sz = sizeof(ssize_t);

	expect_d_eq(mallctl("arena.0.dirty_decay_ms",
	    (void *)&orig_dirty_decay_ms, &sz, NULL, 0), 0,
	    "Unexpected mallctl() failure");

	/* -2 is out of range (-1 means "never decay"). */
	dirty_decay_ms = -2;
	expect_d_eq(mallctl("arena.0.dirty_decay_ms", NULL, NULL,
	    (void *)&dirty_decay_ms, sizeof(ssize_t)), EFAULT,
	    "Unexpected mallctl() success");

	dirty_decay_ms = 0x7fffffff;
	expect_d_eq(mallctl("arena.0.dirty_decay_ms", NULL, NULL,
	    (void *)&dirty_decay_ms, sizeof(ssize_t)), 0,
	    "Unexpected mallctl() failure");

	/* Each read+write must return the value written one step before. */
	for (prev_dirty_decay_ms = dirty_decay_ms, dirty_decay_ms = -1;
	    dirty_decay_ms < 20; prev_dirty_decay_ms = dirty_decay_ms,
	    dirty_decay_ms++) {
		ssize_t old_dirty_decay_ms;

		expect_d_eq(mallctl("arena.0.dirty_decay_ms",
		    (void *)&old_dirty_decay_ms, &sz, (void *)&dirty_decay_ms,
		    sizeof(ssize_t)), 0, "Unexpected mallctl() failure");
		expect_zd_eq(old_dirty_decay_ms, prev_dirty_decay_ms,
		    "Unexpected old arena.0.dirty_decay_ms");
	}
}
TEST_END
588 
/*
 * Verify "arena.0.muzzy_decay_ms"; mirrors the dirty_decay_ms test:
 * out-of-range values yield EFAULT, valid values round-trip.
 */
TEST_BEGIN(test_arena_i_muzzy_decay_ms) {
	ssize_t muzzy_decay_ms, orig_muzzy_decay_ms, prev_muzzy_decay_ms;
	size_t sz = sizeof(ssize_t);

	expect_d_eq(mallctl("arena.0.muzzy_decay_ms",
	    (void *)&orig_muzzy_decay_ms, &sz, NULL, 0), 0,
	    "Unexpected mallctl() failure");

	/* -2 is out of range (-1 means "never decay"). */
	muzzy_decay_ms = -2;
	expect_d_eq(mallctl("arena.0.muzzy_decay_ms", NULL, NULL,
	    (void *)&muzzy_decay_ms, sizeof(ssize_t)), EFAULT,
	    "Unexpected mallctl() success");

	muzzy_decay_ms = 0x7fffffff;
	expect_d_eq(mallctl("arena.0.muzzy_decay_ms", NULL, NULL,
	    (void *)&muzzy_decay_ms, sizeof(ssize_t)), 0,
	    "Unexpected mallctl() failure");

	/* Each read+write must return the value written one step before. */
	for (prev_muzzy_decay_ms = muzzy_decay_ms, muzzy_decay_ms = -1;
	    muzzy_decay_ms < 20; prev_muzzy_decay_ms = muzzy_decay_ms,
	    muzzy_decay_ms++) {
		ssize_t old_muzzy_decay_ms;

		expect_d_eq(mallctl("arena.0.muzzy_decay_ms",
		    (void *)&old_muzzy_decay_ms, &sz, (void *)&muzzy_decay_ms,
		    sizeof(ssize_t)), 0, "Unexpected mallctl() failure");
		expect_zd_eq(old_muzzy_decay_ms, prev_muzzy_decay_ms,
		    "Unexpected old arena.0.muzzy_decay_ms");
	}
}
TEST_END
620 
621 TEST_BEGIN(test_arena_i_purge) {
622 	unsigned narenas;
623 	size_t sz = sizeof(unsigned);
624 	size_t mib[3];
625 	size_t miblen = 3;
626 
627 	expect_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
628 	    "Unexpected mallctl() failure");
629 
630 	expect_d_eq(mallctl("arenas.narenas", (void *)&narenas, &sz, NULL, 0),
631 	    0, "Unexpected mallctl() failure");
632 	expect_d_eq(mallctlnametomib("arena.0.purge", mib, &miblen), 0,
633 	    "Unexpected mallctlnametomib() failure");
634 	mib[1] = narenas;
635 	expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0), 0,
636 	    "Unexpected mallctlbymib() failure");
637 
638 	mib[1] = MALLCTL_ARENAS_ALL;
639 	expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0), 0,
640 	    "Unexpected mallctlbymib() failure");
641 }
642 TEST_END
643 
/*
 * Verify "arena.<i>.decay"; mirrors test_arena_i_purge: by name for
 * arena 0, then by mib for index narenas and MALLCTL_ARENAS_ALL.
 */
TEST_BEGIN(test_arena_i_decay) {
	unsigned narenas;
	size_t sz = sizeof(unsigned);
	size_t mib[3];
	size_t miblen = 3;

	expect_d_eq(mallctl("arena.0.decay", NULL, NULL, NULL, 0), 0,
	    "Unexpected mallctl() failure");

	expect_d_eq(mallctl("arenas.narenas", (void *)&narenas, &sz, NULL, 0),
	    0, "Unexpected mallctl() failure");
	expect_d_eq(mallctlnametomib("arena.0.decay", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() failure");
	mib[1] = narenas;
	expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0), 0,
	    "Unexpected mallctlbymib() failure");

	mib[1] = MALLCTL_ARENAS_ALL;
	expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0), 0,
	    "Unexpected mallctlbymib() failure");
}
TEST_END
666 
667 TEST_BEGIN(test_arena_i_dss) {
668 	const char *dss_prec_old, *dss_prec_new;
669 	size_t sz = sizeof(dss_prec_old);
670 	size_t mib[3];
671 	size_t miblen;
672 
673 	miblen = sizeof(mib)/sizeof(size_t);
674 	expect_d_eq(mallctlnametomib("arena.0.dss", mib, &miblen), 0,
675 	    "Unexpected mallctlnametomib() error");
676 
677 	dss_prec_new = "disabled";
678 	expect_d_eq(mallctlbymib(mib, miblen, (void *)&dss_prec_old, &sz,
679 	    (void *)&dss_prec_new, sizeof(dss_prec_new)), 0,
680 	    "Unexpected mallctl() failure");
681 	expect_str_ne(dss_prec_old, "primary",
682 	    "Unexpected default for dss precedence");
683 
684 	expect_d_eq(mallctlbymib(mib, miblen, (void *)&dss_prec_new, &sz,
685 	    (void *)&dss_prec_old, sizeof(dss_prec_old)), 0,
686 	    "Unexpected mallctl() failure");
687 
688 	expect_d_eq(mallctlbymib(mib, miblen, (void *)&dss_prec_old, &sz, NULL,
689 	    0), 0, "Unexpected mallctl() failure");
690 	expect_str_ne(dss_prec_old, "primary",
691 	    "Unexpected value for dss precedence");
692 
693 	mib[1] = narenas_total_get();
694 	dss_prec_new = "disabled";
695 	expect_d_eq(mallctlbymib(mib, miblen, (void *)&dss_prec_old, &sz,
696 	    (void *)&dss_prec_new, sizeof(dss_prec_new)), 0,
697 	    "Unexpected mallctl() failure");
698 	expect_str_ne(dss_prec_old, "primary",
699 	    "Unexpected default for dss precedence");
700 
701 	expect_d_eq(mallctlbymib(mib, miblen, (void *)&dss_prec_new, &sz,
702 	    (void *)&dss_prec_old, sizeof(dss_prec_new)), 0,
703 	    "Unexpected mallctl() failure");
704 
705 	expect_d_eq(mallctlbymib(mib, miblen, (void *)&dss_prec_old, &sz, NULL,
706 	    0), 0, "Unexpected mallctl() failure");
707 	expect_str_ne(dss_prec_old, "primary",
708 	    "Unexpected value for dss precedence");
709 }
710 TEST_END
711 
/*
 * Verify "arena.0.retain_grow_limit" (only meaningful with opt.retain):
 * default equals SC_LARGE_MAXCLASS, sub-page limits are rejected with
 * EFAULT, and written limits are rounded down to a psize class.
 */
TEST_BEGIN(test_arena_i_retain_grow_limit) {
	size_t old_limit, new_limit, default_limit;
	size_t mib[3];
	size_t miblen;

	bool retain_enabled;
	size_t sz = sizeof(retain_enabled);
	expect_d_eq(mallctl("opt.retain", &retain_enabled, &sz, NULL, 0),
	    0, "Unexpected mallctl() failure");
	test_skip_if(!retain_enabled);

	sz = sizeof(default_limit);
	miblen = sizeof(mib)/sizeof(size_t);
	expect_d_eq(mallctlnametomib("arena.0.retain_grow_limit", mib, &miblen),
	    0, "Unexpected mallctlnametomib() error");

	expect_d_eq(mallctlbymib(mib, miblen, &default_limit, &sz, NULL, 0), 0,
	    "Unexpected mallctl() failure");
	expect_zu_eq(default_limit, SC_LARGE_MAXCLASS,
	    "Unexpected default for retain_grow_limit");

	/* Limits below one page are rejected. */
	new_limit = PAGE - 1;
	expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, &new_limit,
	    sizeof(new_limit)), EFAULT, "Unexpected mallctl() success");

	/* PAGE + 1 is accepted but rounded down to PAGE. */
	new_limit = PAGE + 1;
	expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, &new_limit,
	    sizeof(new_limit)), 0, "Unexpected mallctl() failure");
	expect_d_eq(mallctlbymib(mib, miblen, &old_limit, &sz, NULL, 0), 0,
	    "Unexpected mallctl() failure");
	expect_zu_eq(old_limit, PAGE,
	    "Unexpected value for retain_grow_limit");

	/* Expect grow less than psize class 10. */
	new_limit = sz_pind2sz(10) - 1;
	expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, &new_limit,
	    sizeof(new_limit)), 0, "Unexpected mallctl() failure");
	expect_d_eq(mallctlbymib(mib, miblen, &old_limit, &sz, NULL, 0), 0,
	    "Unexpected mallctl() failure");
	expect_zu_eq(old_limit, sz_pind2sz(9),
	    "Unexpected value for retain_grow_limit");

	/* Restore to default. */
	expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, &default_limit,
	    sizeof(default_limit)), 0, "Unexpected mallctl() failure");
}
TEST_END
759 
760 TEST_BEGIN(test_arenas_dirty_decay_ms) {
761 	ssize_t dirty_decay_ms, orig_dirty_decay_ms, prev_dirty_decay_ms;
762 	size_t sz = sizeof(ssize_t);
763 
764 	expect_d_eq(mallctl("arenas.dirty_decay_ms",
765 	    (void *)&orig_dirty_decay_ms, &sz, NULL, 0), 0,
766 	    "Unexpected mallctl() failure");
767 
768 	dirty_decay_ms = -2;
769 	expect_d_eq(mallctl("arenas.dirty_decay_ms", NULL, NULL,
770 	    (void *)&dirty_decay_ms, sizeof(ssize_t)), EFAULT,
771 	    "Unexpected mallctl() success");
772 
773 	dirty_decay_ms = 0x7fffffff;
774 	expect_d_eq(mallctl("arenas.dirty_decay_ms", NULL, NULL,
775 	    (void *)&dirty_decay_ms, sizeof(ssize_t)), 0,
776 	    "Expected mallctl() failure");
777 
778 	for (prev_dirty_decay_ms = dirty_decay_ms, dirty_decay_ms = -1;
779 	    dirty_decay_ms < 20; prev_dirty_decay_ms = dirty_decay_ms,
780 	    dirty_decay_ms++) {
781 		ssize_t old_dirty_decay_ms;
782 
783 		expect_d_eq(mallctl("arenas.dirty_decay_ms",
784 		    (void *)&old_dirty_decay_ms, &sz, (void *)&dirty_decay_ms,
785 		    sizeof(ssize_t)), 0, "Unexpected mallctl() failure");
786 		expect_zd_eq(old_dirty_decay_ms, prev_dirty_decay_ms,
787 		    "Unexpected old arenas.dirty_decay_ms");
788 	}
789 }
790 TEST_END
791 
792 TEST_BEGIN(test_arenas_muzzy_decay_ms) {
793 	ssize_t muzzy_decay_ms, orig_muzzy_decay_ms, prev_muzzy_decay_ms;
794 	size_t sz = sizeof(ssize_t);
795 
796 	expect_d_eq(mallctl("arenas.muzzy_decay_ms",
797 	    (void *)&orig_muzzy_decay_ms, &sz, NULL, 0), 0,
798 	    "Unexpected mallctl() failure");
799 
800 	muzzy_decay_ms = -2;
801 	expect_d_eq(mallctl("arenas.muzzy_decay_ms", NULL, NULL,
802 	    (void *)&muzzy_decay_ms, sizeof(ssize_t)), EFAULT,
803 	    "Unexpected mallctl() success");
804 
805 	muzzy_decay_ms = 0x7fffffff;
806 	expect_d_eq(mallctl("arenas.muzzy_decay_ms", NULL, NULL,
807 	    (void *)&muzzy_decay_ms, sizeof(ssize_t)), 0,
808 	    "Expected mallctl() failure");
809 
810 	for (prev_muzzy_decay_ms = muzzy_decay_ms, muzzy_decay_ms = -1;
811 	    muzzy_decay_ms < 20; prev_muzzy_decay_ms = muzzy_decay_ms,
812 	    muzzy_decay_ms++) {
813 		ssize_t old_muzzy_decay_ms;
814 
815 		expect_d_eq(mallctl("arenas.muzzy_decay_ms",
816 		    (void *)&old_muzzy_decay_ms, &sz, (void *)&muzzy_decay_ms,
817 		    sizeof(ssize_t)), 0, "Unexpected mallctl() failure");
818 		expect_zd_eq(old_muzzy_decay_ms, prev_muzzy_decay_ms,
819 		    "Unexpected old arenas.muzzy_decay_ms");
820 	}
821 }
822 TEST_END
823 
/*
 * Verify that read-only "arenas.*" constants match their compile-time
 * counterparts.
 */
TEST_BEGIN(test_arenas_constants) {
/* Read arenas.<name> as type t and compare against expected. */
#define TEST_ARENAS_CONSTANT(t, name, expected) do {			\
	t name;								\
	size_t sz = sizeof(t);						\
	expect_d_eq(mallctl("arenas."#name, (void *)&name, &sz, NULL,	\
	    0), 0, "Unexpected mallctl() failure");			\
	expect_zu_eq(name, expected, "Incorrect "#name" size");		\
} while (0)

	TEST_ARENAS_CONSTANT(size_t, quantum, QUANTUM);
	TEST_ARENAS_CONSTANT(size_t, page, PAGE);
	TEST_ARENAS_CONSTANT(unsigned, nbins, SC_NBINS);
	TEST_ARENAS_CONSTANT(unsigned, nlextents, SC_NSIZES - SC_NBINS);

#undef TEST_ARENAS_CONSTANT
}
TEST_END
841 
/*
 * Verify that "arenas.bin.0.*" constants match the bin_infos[0]
 * descriptor.
 */
TEST_BEGIN(test_arenas_bin_constants) {
/* Read arenas.bin.0.<name> as type t and compare against expected. */
#define TEST_ARENAS_BIN_CONSTANT(t, name, expected) do {		\
	t name;								\
	size_t sz = sizeof(t);						\
	expect_d_eq(mallctl("arenas.bin.0."#name, (void *)&name, &sz,	\
	    NULL, 0), 0, "Unexpected mallctl() failure");		\
	expect_zu_eq(name, expected, "Incorrect "#name" size");		\
} while (0)

	TEST_ARENAS_BIN_CONSTANT(size_t, size, bin_infos[0].reg_size);
	TEST_ARENAS_BIN_CONSTANT(uint32_t, nregs, bin_infos[0].nregs);
	TEST_ARENAS_BIN_CONSTANT(size_t, slab_size,
	    bin_infos[0].slab_size);
	TEST_ARENAS_BIN_CONSTANT(uint32_t, nshards, bin_infos[0].n_shards);

#undef TEST_ARENAS_BIN_CONSTANT
}
TEST_END
860 
861 TEST_BEGIN(test_arenas_lextent_constants) {
862 #define TEST_ARENAS_LEXTENT_CONSTANT(t, name, expected) do {		\
863 	t name;								\
864 	size_t sz = sizeof(t);						\
865 	expect_d_eq(mallctl("arenas.lextent.0."#name, (void *)&name,	\
866 	    &sz, NULL, 0), 0, "Unexpected mallctl() failure");		\
867 	expect_zu_eq(name, expected, "Incorrect "#name" size");		\
868 } while (0)
869 
870 	TEST_ARENAS_LEXTENT_CONSTANT(size_t, size,
871 	    SC_LARGE_MINCLASS);
872 
873 #undef TEST_ARENAS_LEXTENT_CONSTANT
874 }
875 TEST_END
876 
877 TEST_BEGIN(test_arenas_create) {
878 	unsigned narenas_before, arena, narenas_after;
879 	size_t sz = sizeof(unsigned);
880 
881 	expect_d_eq(mallctl("arenas.narenas", (void *)&narenas_before, &sz,
882 	    NULL, 0), 0, "Unexpected mallctl() failure");
883 	expect_d_eq(mallctl("arenas.create", (void *)&arena, &sz, NULL, 0), 0,
884 	    "Unexpected mallctl() failure");
885 	expect_d_eq(mallctl("arenas.narenas", (void *)&narenas_after, &sz, NULL,
886 	    0), 0, "Unexpected mallctl() failure");
887 
888 	expect_u_eq(narenas_before+1, narenas_after,
889 	    "Unexpected number of arenas before versus after extension");
890 	expect_u_eq(arena, narenas_after-1, "Unexpected arena index");
891 }
892 TEST_END
893 
894 TEST_BEGIN(test_arenas_lookup) {
895 	unsigned arena, arena1;
896 	void *ptr;
897 	size_t sz = sizeof(unsigned);
898 
899 	expect_d_eq(mallctl("arenas.create", (void *)&arena, &sz, NULL, 0), 0,
900 	    "Unexpected mallctl() failure");
901 	ptr = mallocx(42, MALLOCX_ARENA(arena) | MALLOCX_TCACHE_NONE);
902 	expect_ptr_not_null(ptr, "Unexpected mallocx() failure");
903 	expect_d_eq(mallctl("arenas.lookup", &arena1, &sz, &ptr, sizeof(ptr)),
904 	    0, "Unexpected mallctl() failure");
905 	expect_u_eq(arena, arena1, "Unexpected arena index");
906 	dallocx(ptr, 0);
907 }
908 TEST_END
909 
TEST_BEGIN(test_prof_active) {
	/*
	 * If config_prof is off, then the test for prof_active in
	 * test_mallctl_opt was already enough.
	 */
	test_skip_if(!config_prof);
	test_skip_if(opt_prof);

	bool active, old;
	size_t len = sizeof(bool);

	/* With opt_prof off, enabling prof.active must fail with ENOENT. */
	active = true;
	expect_d_eq(mallctl("prof.active", NULL, NULL, &active, len), ENOENT,
	    "Setting prof_active to true should fail when opt_prof is off");
	old = true;
	expect_d_eq(mallctl("prof.active", &old, &len, &active, len), ENOENT,
	    "Setting prof_active to true should fail when opt_prof is off");
	/* A failed mallctl() must leave the oldp buffer untouched. */
	expect_true(old, "old value should not be touched when mallctl fails");
	/* Writing false is a no-op change and therefore succeeds. */
	active = false;
	expect_d_eq(mallctl("prof.active", NULL, NULL, &active, len), 0,
	    "Setting prof_active to false should succeed when opt_prof is off");
	expect_d_eq(mallctl("prof.active", &old, &len, &active, len), 0,
	    "Setting prof_active to false should succeed when opt_prof is off");
	expect_false(old, "prof_active should be false when opt_prof is off");
}
TEST_END
936 
937 TEST_BEGIN(test_stats_arenas) {
938 #define TEST_STATS_ARENAS(t, name) do {					\
939 	t name;								\
940 	size_t sz = sizeof(t);						\
941 	expect_d_eq(mallctl("stats.arenas.0."#name, (void *)&name, &sz,	\
942 	    NULL, 0), 0, "Unexpected mallctl() failure");		\
943 } while (0)
944 
945 	TEST_STATS_ARENAS(unsigned, nthreads);
946 	TEST_STATS_ARENAS(const char *, dss);
947 	TEST_STATS_ARENAS(ssize_t, dirty_decay_ms);
948 	TEST_STATS_ARENAS(ssize_t, muzzy_decay_ms);
949 	TEST_STATS_ARENAS(size_t, pactive);
950 	TEST_STATS_ARENAS(size_t, pdirty);
951 
952 #undef TEST_STATS_ARENAS
953 }
954 TEST_END
955 
956 static void
957 alloc_hook(void *extra, UNUSED hook_alloc_t type, UNUSED void *result,
958     UNUSED uintptr_t result_raw, UNUSED uintptr_t args_raw[3]) {
959 	*(bool *)extra = true;
960 }
961 
962 static void
963 dalloc_hook(void *extra, UNUSED hook_dalloc_t type,
964     UNUSED void *address, UNUSED uintptr_t args_raw[3]) {
965 	*(bool *)extra = true;
966 }
967 
TEST_BEGIN(test_hooks) {
	bool hook_called = false;
	/*
	 * Install only the alloc and dalloc hooks (third slot left NULL —
	 * presumably the expand hook; confirm against hook.h).  The extra
	 * pointer routes each callback to hook_called.
	 */
	hooks_t hooks = {&alloc_hook, &dalloc_hook, NULL, &hook_called};
	void *handle = NULL;
	size_t sz = sizeof(handle);
	int err = mallctl("experimental.hooks.install", &handle, &sz, &hooks,
	    sizeof(hooks));
	expect_d_eq(err, 0, "Hook installation failed");
	expect_ptr_ne(handle, NULL, "Hook installation gave null handle");
	/* Each of alloc and free must trip its corresponding hook. */
	void *ptr = mallocx(1, 0);
	expect_true(hook_called, "Alloc hook not called");
	hook_called = false;
	free(ptr);
	expect_true(hook_called, "Free hook not called");

	/* After removal (by handle), neither hook should fire again. */
	err = mallctl("experimental.hooks.remove", NULL, NULL, &handle,
	    sizeof(handle));
	expect_d_eq(err, 0, "Hook removal failed");
	hook_called = false;
	ptr = mallocx(1, 0);
	free(ptr);
	expect_false(hook_called, "Hook called after removal");
}
TEST_END
992 
993 TEST_BEGIN(test_hooks_exhaustion) {
994 	bool hook_called = false;
995 	hooks_t hooks = {&alloc_hook, &dalloc_hook, NULL, &hook_called};
996 
997 	void *handle;
998 	void *handles[HOOK_MAX];
999 	size_t sz = sizeof(handle);
1000 	int err;
1001 	for (int i = 0; i < HOOK_MAX; i++) {
1002 		handle = NULL;
1003 		err = mallctl("experimental.hooks.install", &handle, &sz,
1004 		    &hooks, sizeof(hooks));
1005 		expect_d_eq(err, 0, "Error installation hooks");
1006 		expect_ptr_ne(handle, NULL, "Got NULL handle");
1007 		handles[i] = handle;
1008 	}
1009 	err = mallctl("experimental.hooks.install", &handle, &sz, &hooks,
1010 	    sizeof(hooks));
1011 	expect_d_eq(err, EAGAIN, "Should have failed hook installation");
1012 	for (int i = 0; i < HOOK_MAX; i++) {
1013 		err = mallctl("experimental.hooks.remove", NULL, NULL,
1014 		    &handles[i], sizeof(handles[i]));
1015 		expect_d_eq(err, 0, "Hook removal failed");
1016 	}
1017 	/* Insertion failed, but then we removed some; it should work now. */
1018 	handle = NULL;
1019 	err = mallctl("experimental.hooks.install", &handle, &sz, &hooks,
1020 	    sizeof(hooks));
1021 	expect_d_eq(err, 0, "Hook insertion failed");
1022 	expect_ptr_ne(handle, NULL, "Got NULL handle");
1023 	err = mallctl("experimental.hooks.remove", NULL, NULL, &handle,
1024 	    sizeof(handle));
1025 	expect_d_eq(err, 0, "Hook removal failed");
1026 }
1027 TEST_END
1028 
TEST_BEGIN(test_thread_idle) {
	/*
	 * We're cheating a little bit in this test, and inferring things about
	 * implementation internals (like tcache details).  We have to;
	 * thread.idle has no guaranteed effects.  We need stats to make these
	 * inferences.
	 */
	test_skip_if(!config_stats);

	int err;
	size_t sz;
	size_t miblen;

	/* The test is only meaningful if this thread actually has a tcache. */
	bool tcache_enabled = false;
	sz = sizeof(tcache_enabled);
	err = mallctl("thread.tcache.enabled", &tcache_enabled, &sz, NULL, 0);
	expect_d_eq(err, 0, "");
	test_skip_if(!tcache_enabled);

	/* ... and if the tcache can actually hold a small allocation. */
	size_t tcache_max;
	sz = sizeof(tcache_max);
	err = mallctl("arenas.tcache_max", &tcache_max, &sz, NULL, 0);
	expect_d_eq(err, 0, "");
	test_skip_if(tcache_max == 0);

	unsigned arena_ind;
	sz = sizeof(arena_ind);
	err = mallctl("thread.arena", &arena_ind, &sz, NULL, 0);
	expect_d_eq(err, 0, "");

	/* We're going to do an allocation of size 1, which we know is small. */
	size_t mib[5];
	miblen = sizeof(mib)/sizeof(mib[0]);
	err = mallctlnametomib("stats.arenas.0.small.ndalloc", mib, &miblen);
	expect_d_eq(err, 0, "");
	/* Patch the "0" MIB component with this thread's arena index. */
	mib[2] = arena_ind;

	/*
	 * This alloc and dalloc should leave something in the tcache, in a
	 * small size's cache bin.
	 */
	void *ptr = mallocx(1, 0);
	dallocx(ptr, 0);

	/* Bump the epoch so the stats reads below see current counters. */
	uint64_t epoch;
	err = mallctl("epoch", NULL, NULL, &epoch, sizeof(epoch));
	expect_d_eq(err, 0, "");

	uint64_t small_dalloc_pre_idle;
	sz = sizeof(small_dalloc_pre_idle);
	err = mallctlbymib(mib, miblen, &small_dalloc_pre_idle, &sz, NULL, 0);
	expect_d_eq(err, 0, "");

	err = mallctl("thread.idle", NULL, NULL, NULL, 0);
	expect_d_eq(err, 0, "");

	/* Refresh stats again after the idle call. */
	err = mallctl("epoch", NULL, NULL, &epoch, sizeof(epoch));
	expect_d_eq(err, 0, "");

	uint64_t small_dalloc_post_idle;
	sz = sizeof(small_dalloc_post_idle);
	err = mallctlbymib(mib, miblen, &small_dalloc_post_idle, &sz, NULL, 0);
	expect_d_eq(err, 0, "");

	/*
	 * thread.idle should have flushed the tcache, pushing the cached
	 * deallocation through to the arena's small ndalloc counter.
	 */
	expect_u64_lt(small_dalloc_pre_idle, small_dalloc_post_idle,
	    "Purge didn't flush the tcache");
}
TEST_END
1097 
1098 TEST_BEGIN(test_thread_peak) {
1099 	test_skip_if(!config_stats);
1100 
1101 	/*
1102 	 * We don't commit to any stable amount of accuracy for peak tracking
1103 	 * (in practice, when this test was written, we made sure to be within
1104 	 * 100k).  But 10MB is big for more or less any definition of big.
1105 	 */
1106 	size_t big_size = 10 * 1024 * 1024;
1107 	size_t small_size = 256;
1108 
1109 	void *ptr;
1110 	int err;
1111 	size_t sz;
1112 	uint64_t peak;
1113 	sz = sizeof(uint64_t);
1114 
1115 	err = mallctl("thread.peak.reset", NULL, NULL, NULL, 0);
1116 	expect_d_eq(err, 0, "");
1117 	ptr = mallocx(SC_SMALL_MAXCLASS, 0);
1118 	err = mallctl("thread.peak.read", &peak, &sz, NULL, 0);
1119 	expect_d_eq(err, 0, "");
1120 	expect_u64_eq(peak, SC_SMALL_MAXCLASS, "Missed an update");
1121 	free(ptr);
1122 	err = mallctl("thread.peak.read", &peak, &sz, NULL, 0);
1123 	expect_d_eq(err, 0, "");
1124 	expect_u64_eq(peak, SC_SMALL_MAXCLASS, "Freeing changed peak");
1125 	ptr = mallocx(big_size, 0);
1126 	free(ptr);
1127 	/*
1128 	 * The peak should have hit big_size in the last two lines, even though
1129 	 * the net allocated bytes has since dropped back down to zero.  We
1130 	 * should have noticed the peak change without having down any mallctl
1131 	 * calls while net allocated bytes was high.
1132 	 */
1133 	err = mallctl("thread.peak.read", &peak, &sz, NULL, 0);
1134 	expect_d_eq(err, 0, "");
1135 	expect_u64_ge(peak, big_size, "Missed a peak change.");
1136 
1137 	/* Allocate big_size, but using small allocations. */
1138 	size_t nallocs = big_size / small_size;
1139 	void **ptrs = calloc(nallocs, sizeof(void *));
1140 	err = mallctl("thread.peak.reset", NULL, NULL, NULL, 0);
1141 	expect_d_eq(err, 0, "");
1142 	err = mallctl("thread.peak.read", &peak, &sz, NULL, 0);
1143 	expect_d_eq(err, 0, "");
1144 	expect_u64_eq(0, peak, "Missed a reset.");
1145 	for (size_t i = 0; i < nallocs; i++) {
1146 		ptrs[i] = mallocx(small_size, 0);
1147 	}
1148 	for (size_t i = 0; i < nallocs; i++) {
1149 		free(ptrs[i]);
1150 	}
1151 	err = mallctl("thread.peak.read", &peak, &sz, NULL, 0);
1152 	expect_d_eq(err, 0, "");
1153 	/*
1154 	 * We don't guarantee exactness; make sure we're within 10% of the peak,
1155 	 * though.
1156 	 */
1157 	expect_u64_ge(peak, nallocx(small_size, 0) * nallocs * 9 / 10,
1158 	    "Missed some peak changes.");
1159 	expect_u64_le(peak, nallocx(small_size, 0) * nallocs * 11 / 10,
1160 	    "Overcounted peak changes.");
1161 	free(ptrs);
1162 }
1163 TEST_END
1164 
typedef struct activity_test_data_s activity_test_data_t;
/* Captures the (alloc, dalloc) pair most recently reported to the callback. */
struct activity_test_data_s {
	uint64_t obtained_alloc;
	uint64_t obtained_dalloc;
};

/* Activity callback: stash the reported counters into the user context. */
static void
activity_test_callback(void *uctx, uint64_t alloc, uint64_t dalloc) {
	activity_test_data_t *data = uctx;
	data->obtained_alloc = alloc;
	data->obtained_dalloc = dalloc;
}
1177 
TEST_BEGIN(test_thread_activity_callback) {
	test_skip_if(!config_stats);

	const size_t big_size = 10 * 1024 * 1024;
	void *ptr;
	int err;
	size_t sz;

	/* Pointers into this thread's live allocated/deallocated counters. */
	uint64_t *allocatedp;
	uint64_t *deallocatedp;
	sz = sizeof(allocatedp);
	err = mallctl("thread.allocatedp", &allocatedp, &sz, NULL, 0);
	assert_d_eq(0, err, "");
	err = mallctl("thread.deallocatedp", &deallocatedp, &sz, NULL, 0);
	assert_d_eq(0, err, "");

	/* Poison values; the mallctl read below must overwrite them. */
	activity_callback_thunk_t old_thunk = {(activity_callback_t)111,
		(void *)222};

	activity_test_data_t test_data = {333, 444};
	activity_callback_thunk_t new_thunk =
	    {&activity_test_callback, &test_data};

	/* Install our callback; the previous thunk comes back in old_thunk. */
	sz = sizeof(old_thunk);
	err = mallctl("experimental.thread.activity_callback", &old_thunk, &sz,
	    &new_thunk, sizeof(new_thunk));
	assert_d_eq(0, err, "");

	expect_true(old_thunk.callback == NULL, "Callback already installed");
	expect_true(old_thunk.uctx == NULL, "Callback data already installed");

	/* The callback must see the same totals the counters report. */
	ptr = mallocx(big_size, 0);
	expect_u64_eq(test_data.obtained_alloc, *allocatedp, "");
	expect_u64_eq(test_data.obtained_dalloc, *deallocatedp, "");

	free(ptr);
	expect_u64_eq(test_data.obtained_alloc, *allocatedp, "");
	expect_u64_eq(test_data.obtained_dalloc, *deallocatedp, "");

	/* Uninstall; the read-back thunk should be the one we installed. */
	sz = sizeof(old_thunk);
	new_thunk = (activity_callback_thunk_t){ NULL, NULL };
	err = mallctl("experimental.thread.activity_callback", &old_thunk, &sz,
	    &new_thunk, sizeof(new_thunk));
	assert_d_eq(0, err, "");

	expect_true(old_thunk.callback == &activity_test_callback, "");
	expect_true(old_thunk.uctx == &test_data, "");

	/* Inserting NULL should have turned off tracking. */
	test_data.obtained_alloc = 333;
	test_data.obtained_dalloc = 444;
	ptr = mallocx(big_size, 0);
	free(ptr);
	expect_u64_eq(333, test_data.obtained_alloc, "");
	expect_u64_eq(444, test_data.obtained_dalloc, "");
}
TEST_END
1235 
int
main(void) {
	/* Run every mallctl unit test registered in this file. */
	return test(
	    test_mallctl_errors,
	    test_mallctlnametomib_errors,
	    test_mallctlbymib_errors,
	    test_mallctl_read_write,
	    test_mallctlnametomib_short_mib,
	    test_mallctlnametomib_short_name,
	    test_mallctlmibnametomib,
	    test_mallctlbymibname,
	    test_mallctl_config,
	    test_mallctl_opt,
	    test_manpage_example,
	    test_tcache_none,
	    test_tcache,
	    test_thread_arena,
	    test_arena_i_initialized,
	    test_arena_i_dirty_decay_ms,
	    test_arena_i_muzzy_decay_ms,
	    test_arena_i_purge,
	    test_arena_i_decay,
	    test_arena_i_dss,
	    test_arena_i_retain_grow_limit,
	    test_arenas_dirty_decay_ms,
	    test_arenas_muzzy_decay_ms,
	    test_arenas_constants,
	    test_arenas_bin_constants,
	    test_arenas_lextent_constants,
	    test_arenas_create,
	    test_arenas_lookup,
	    test_prof_active,
	    test_stats_arenas,
	    test_hooks,
	    test_hooks_exhaustion,
	    test_thread_idle,
	    test_thread_peak,
	    test_thread_activity_callback);
}
1275