#include "test/jemalloc_test.h"

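/*
 * Build with profiling enabled but initially inactive; lg_prof_sample:0 sets
 * the average sample interval to 2^0 bytes, so every allocation is sampled
 * once profiling is activated.
 */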
#ifdef JEMALLOC_PROF
const char *malloc_conf =
    "prof:true,prof_active:false,lg_prof_sample:0";
#endif

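/*
 * Redirect profile dump output to /dev/null so that the prof.dump calls made
 * by these tests do not leave files behind.
 */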
static int
prof_dump_open_intercept(bool propagate_err, const char *filename)
{
	int fd;

	fd = open("/dev/null", O_WRONLY);
	assert_d_ne(fd, -1, "Unexpected open() failure");

	return (fd);
}

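/*
 * Verify that prof.reset leaves the sample rate alone when called without an
 * argument, and updates prof.lg_sample when a new value is supplied.
 */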
TEST_BEGIN(test_prof_reset_basic)
{
	size_t lg_prof_sample_orig, lg_prof_sample, lg_prof_sample_next;
	size_t sz;
	unsigned i;

	test_skip_if(!config_prof);

	sz = sizeof(size_t);
	assert_d_eq(mallctl("opt.lg_prof_sample", &lg_prof_sample_orig, &sz,
	    NULL, 0), 0,
	    "Unexpected mallctl failure while reading profiling sample rate");
	assert_zu_eq(lg_prof_sample_orig, 0,
	    "Unexpected profiling sample rate");
	sz = sizeof(size_t);
	assert_d_eq(mallctl("prof.lg_sample", &lg_prof_sample, &sz, NULL, 0), 0,
	    "Unexpected mallctl failure while reading profiling sample rate");
	assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
	    "Unexpected disagreement between \"opt.lg_prof_sample\" and "
	    "\"prof.lg_sample\"");

	/* Test simple resets. */
	for (i = 0; i < 2; i++) {
		assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0), 0,
		    "Unexpected mallctl failure while resetting profile data");
		sz = sizeof(size_t);
		assert_d_eq(mallctl("prof.lg_sample", &lg_prof_sample, &sz,
		    NULL, 0), 0, "Unexpected mallctl failure while reading "
		    "profiling sample rate");
		assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
		    "Unexpected profile sample rate change");
	}

	/* Test resets with prof.lg_sample changes. */
	lg_prof_sample_next = 1;
	for (i = 0; i < 2; i++) {
		assert_d_eq(mallctl("prof.reset", NULL, NULL,
		    &lg_prof_sample_next, sizeof(size_t)), 0,
		    "Unexpected mallctl failure while resetting profile data");
		sz = sizeof(size_t);
		assert_d_eq(mallctl("prof.lg_sample", &lg_prof_sample, &sz,
		    NULL, 0), 0, "Unexpected mallctl failure while reading "
		    "profiling sample rate");
		assert_zu_eq(lg_prof_sample, lg_prof_sample_next,
		    "Expected profile sample rate change");
		lg_prof_sample_next = lg_prof_sample_orig;
	}

	/* Make sure the test code restored prof.lg_sample. */
	sz = sizeof(size_t);
	assert_d_eq(mallctl("prof.lg_sample", &lg_prof_sample, &sz, NULL, 0), 0,
	    "Unexpected mallctl failure while reading profiling sample rate");
	assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
	    "Unexpected disagreement between \"opt.lg_prof_sample\" and "
	    "\"prof.lg_sample\"");
}
TEST_END

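/*
 * prof_dump_header_intercept() records the aggregate counters handed to the
 * dump header writer so that tests can inspect them after a prof.dump.
 */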
bool prof_dump_header_intercepted = false;
prof_cnt_t cnt_all_copy = {0, 0, 0, 0};
static bool
prof_dump_header_intercept(bool propagate_err, const prof_cnt_t *cnt_all)
{

	prof_dump_header_intercepted = true;
	memcpy(&cnt_all_copy, cnt_all, sizeof(prof_cnt_t));

	return (false);
}

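/*
 * Verify that prof.reset zeroes the counters reported by a subsequent dump
 * while leaving the live allocation's backtrace in place.
 */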
TEST_BEGIN(test_prof_reset_cleanup)
{
	bool active;
	void *p;
	prof_dump_header_t *prof_dump_header_orig;

	test_skip_if(!config_prof);

	active = true;
	assert_d_eq(mallctl("prof.active", NULL, NULL, &active, sizeof(active)),
	    0, "Unexpected mallctl failure while activating profiling");

	assert_zu_eq(prof_bt_count(), 0, "Expected 0 backtraces");
	p = mallocx(1, 0);
	assert_ptr_not_null(p, "Unexpected mallocx() failure");
	assert_zu_eq(prof_bt_count(), 1, "Expected 1 backtrace");

	prof_dump_header_orig = prof_dump_header;
	prof_dump_header = prof_dump_header_intercept;
	assert_false(prof_dump_header_intercepted, "Unexpected intercept");

	assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
	    0, "Unexpected error while dumping heap profile");
	assert_true(prof_dump_header_intercepted, "Expected intercept");
	assert_u64_eq(cnt_all_copy.curobjs, 1, "Expected 1 allocation");

	assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0), 0,
	    "Unexpected error while resetting heap profile data");
	assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
	    0, "Unexpected error while dumping heap profile");
	assert_u64_eq(cnt_all_copy.curobjs, 0, "Expected 0 allocations");
	assert_zu_eq(prof_bt_count(), 1, "Expected 1 backtrace");

	prof_dump_header = prof_dump_header_orig;

	dallocx(p, 0);
	assert_zu_eq(prof_bt_count(), 0, "Expected 0 backtraces");

	active = false;
	assert_d_eq(mallctl("prof.active", NULL, NULL, &active, sizeof(active)),
	    0, "Unexpected mallctl failure while deactivating profiling");
}
TEST_END

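/*
 * Hammer prof.reset from NTHREADS concurrent threads while each thread cycles
 * allocations through a ring buffer and periodically triggers prof.dump.
 */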
#define	NTHREADS		4
#define	NALLOCS_PER_THREAD	(1U << 13)
#define	OBJ_RING_BUF_COUNT	1531
#define	RESET_INTERVAL		(1U << 10)
#define	DUMP_INTERVAL		3677
static void *
thd_start(void *varg)
{
	unsigned thd_ind = *(unsigned *)varg;
	unsigned i;
	void *objs[OBJ_RING_BUF_COUNT];

	memset(objs, 0, sizeof(objs));

	for (i = 0; i < NALLOCS_PER_THREAD; i++) {
		if (i % RESET_INTERVAL == 0) {
			assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0),
			    0, "Unexpected error while resetting heap profile "
			    "data");
		}

		if (i % DUMP_INTERVAL == 0) {
			assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
			    0, "Unexpected error while dumping heap profile");
		}

		{
			void **pp = &objs[i % OBJ_RING_BUF_COUNT];
			if (*pp != NULL) {
				dallocx(*pp, 0);
				*pp = NULL;
			}
			*pp = btalloc(1, thd_ind*NALLOCS_PER_THREAD + i);
			assert_ptr_not_null(*pp,
			    "Unexpected btalloc() failure");
		}
	}

	/* Clean up any remaining objects. */
	for (i = 0; i < OBJ_RING_BUF_COUNT; i++) {
		void **pp = &objs[i % OBJ_RING_BUF_COUNT];
		if (*pp != NULL) {
			dallocx(*pp, 0);
			*pp = NULL;
		}
	}

	return (NULL);
}

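/*
 * Run the thd_start() workload in NTHREADS threads and verify that the
 * backtrace and tdata counts return to their pre-test values.
 */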
TEST_BEGIN(test_prof_reset)
{
	bool active;
	thd_t thds[NTHREADS];
	unsigned thd_args[NTHREADS];
	unsigned i;
	size_t bt_count, tdata_count;

	test_skip_if(!config_prof);

	bt_count = prof_bt_count();
	assert_zu_eq(bt_count, 0,
	    "Unexpected pre-existing backtraces");
	tdata_count = prof_tdata_count();

	active = true;
	assert_d_eq(mallctl("prof.active", NULL, NULL, &active, sizeof(active)),
	    0, "Unexpected mallctl failure while activating profiling");

	for (i = 0; i < NTHREADS; i++) {
		thd_args[i] = i;
		thd_create(&thds[i], thd_start, (void *)&thd_args[i]);
	}
	for (i = 0; i < NTHREADS; i++)
		thd_join(thds[i], NULL);

	assert_zu_eq(prof_bt_count(), bt_count,
	    "Unexpected backtrace count change");
	assert_zu_eq(prof_tdata_count(), tdata_count,
	    "Unexpected remaining tdata structures");

	active = false;
	assert_d_eq(mallctl("prof.active", NULL, NULL, &active, sizeof(active)),
	    0, "Unexpected mallctl failure while deactivating profiling");
}
TEST_END
#undef NTHREADS
#undef NALLOCS_PER_THREAD
#undef OBJ_RING_BUF_COUNT
#undef RESET_INTERVAL
#undef DUMP_INTERVAL

int
main(void)
{

	/* Intercept dumping prior to running any tests. */
	prof_dump_open = prof_dump_open_intercept;

	return (test(
	    test_prof_reset_basic,
	    test_prof_reset_cleanup,
	    test_prof_reset));
}