#include "test/jemalloc_test.h"

#ifdef JEMALLOC_FILL
#  ifndef JEMALLOC_TEST_JUNK_OPT
#    define JEMALLOC_TEST_JUNK_OPT "junk:true"
#  endif
const char *malloc_conf =
    "abort:false,zero:false,redzone:true,quarantine:0," JEMALLOC_TEST_JUNK_OPT;
#endif

static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig;
static arena_dalloc_junk_large_t *arena_dalloc_junk_large_orig;
static huge_dalloc_junk_t *huge_dalloc_junk_orig;
static void *watch_for_junking;
static bool saw_junking;

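/*
 * Arm the deallocation watch: the next time the watched pointer reaches a
 * junking hook, saw_junking is set.
 */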
static void
watch_junking(void *p)
{

	watch_for_junking = p;
	saw_junking = false;
}

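/*
 * Intercepts for the deallocation-time junking hooks.  Each calls through to
 * the original hook and records whether the watched pointer was junked; the
 * small and large intercepts additionally verify that the freed region is
 * filled with JEMALLOC_FREE_JUNK.
 */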
static void
arena_dalloc_junk_small_intercept(void *ptr, arena_bin_info_t *bin_info)
{
	size_t i;

	arena_dalloc_junk_small_orig(ptr, bin_info);
	for (i = 0; i < bin_info->reg_size; i++) {
		assert_u_eq(((uint8_t *)ptr)[i], JEMALLOC_FREE_JUNK,
		    "Missing junk fill for byte %zu/%zu of deallocated region",
		    i, bin_info->reg_size);
	}
	if (ptr == watch_for_junking)
		saw_junking = true;
}

static void
arena_dalloc_junk_large_intercept(void *ptr, size_t usize)
{
	size_t i;

	arena_dalloc_junk_large_orig(ptr, usize);
	for (i = 0; i < usize; i++) {
		assert_u_eq(((uint8_t *)ptr)[i], JEMALLOC_FREE_JUNK,
		    "Missing junk fill for byte %zu/%zu of deallocated region",
		    i, usize);
	}
	if (ptr == watch_for_junking)
		saw_junking = true;
}

static void
huge_dalloc_junk_intercept(void *ptr, size_t usize)
{

	huge_dalloc_junk_orig(ptr, usize);
	/*
	 * The conditions under which junk filling actually occurs are nuanced
	 * enough that it doesn't make sense to duplicate the decision logic in
	 * test code, so don't actually check that the region is junk-filled.
	 */
	if (ptr == watch_for_junking)
		saw_junking = true;
}

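/*
 * Exercise junk filling for allocation sizes in [sz_min, sz_max]: install the
 * dalloc junking intercepts, then grow a single allocation one size class at
 * a time, checking that newly allocated bytes are filled with
 * JEMALLOC_ALLOC_JUNK, that previously written bytes are preserved, and that
 * each move during reallocation, as well as the final deallocation,
 * junk-fills the old region.
 */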
static void
test_junk(size_t sz_min, size_t sz_max)
{
	uint8_t *s;
	size_t sz_prev, sz, i;

	if (opt_junk_free) {
		arena_dalloc_junk_small_orig = arena_dalloc_junk_small;
		arena_dalloc_junk_small = arena_dalloc_junk_small_intercept;
		arena_dalloc_junk_large_orig = arena_dalloc_junk_large;
		arena_dalloc_junk_large = arena_dalloc_junk_large_intercept;
		huge_dalloc_junk_orig = huge_dalloc_junk;
		huge_dalloc_junk = huge_dalloc_junk_intercept;
	}

	sz_prev = 0;
	s = (uint8_t *)mallocx(sz_min, 0);
	assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");

	for (sz = sallocx(s, 0); sz <= sz_max;
	    sz_prev = sz, sz = sallocx(s, 0)) {
		if (sz_prev > 0) {
			assert_u_eq(s[0], 'a',
			    "Previously allocated byte %zu/%zu is corrupted",
			    ZU(0), sz_prev);
			assert_u_eq(s[sz_prev-1], 'a',
			    "Previously allocated byte %zu/%zu is corrupted",
			    sz_prev-1, sz_prev);
		}

		for (i = sz_prev; i < sz; i++) {
			if (opt_junk_alloc) {
				assert_u_eq(s[i], JEMALLOC_ALLOC_JUNK,
				    "Newly allocated byte %zu/%zu isn't "
				    "junk-filled", i, sz);
			}
			s[i] = 'a';
		}

		if (xallocx(s, sz+1, 0, 0) == sz) {
			watch_junking(s);
			s = (uint8_t *)rallocx(s, sz+1, 0);
			assert_ptr_not_null((void *)s,
			    "Unexpected rallocx() failure");
			assert_true(!opt_junk_free || saw_junking,
			    "Expected region of size %zu to be junk-filled",
			    sz);
		}
	}

	watch_junking(s);
	dallocx(s, 0);
	assert_true(!opt_junk_free || saw_junking,
	    "Expected region of size %zu to be junk-filled", sz);

	if (opt_junk_free) {
		arena_dalloc_junk_small = arena_dalloc_junk_small_orig;
		arena_dalloc_junk_large = arena_dalloc_junk_large_orig;
		huge_dalloc_junk = huge_dalloc_junk_orig;
	}
}

TEST_BEGIN(test_junk_small)
{

	test_skip_if(!config_fill);
	test_junk(1, SMALL_MAXCLASS-1);
}
TEST_END

TEST_BEGIN(test_junk_large)
{

	test_skip_if(!config_fill);
	test_junk(SMALL_MAXCLASS+1, large_maxclass);
}
TEST_END

TEST_BEGIN(test_junk_huge)
{

	test_skip_if(!config_fill);
	test_junk(large_maxclass+1, chunksize*2);
}
TEST_END

arena_ralloc_junk_large_t *arena_ralloc_junk_large_orig;
static void *most_recently_trimmed;

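/*
 * Compute the largest request size whose usable size is smaller than "size",
 * so that rallocx() is forced to actually trim the region.
 */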
static size_t
shrink_size(size_t size)
{
	size_t shrink_size;

	for (shrink_size = size - 1; nallocx(shrink_size, 0) == size;
	    shrink_size--)
		; /* Do nothing. */

	return (shrink_size);
}

static void
arena_ralloc_junk_large_intercept(void *ptr, size_t old_usize, size_t usize)
{

	arena_ralloc_junk_large_orig(ptr, old_usize, usize);
	assert_zu_eq(old_usize, large_maxclass, "Unexpected old_usize");
	assert_zu_eq(usize, shrink_size(large_maxclass), "Unexpected usize");
	most_recently_trimmed = ptr;
}

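/*
 * Verify that shrinking a large allocation in place junk-fills the trimmed
 * tail, via the arena_ralloc_junk_large intercept above.
 */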
TEST_BEGIN(test_junk_large_ralloc_shrink)
{
	void *p1, *p2;

	p1 = mallocx(large_maxclass, 0);
	assert_ptr_not_null(p1, "Unexpected mallocx() failure");

	arena_ralloc_junk_large_orig = arena_ralloc_junk_large;
	arena_ralloc_junk_large = arena_ralloc_junk_large_intercept;

	p2 = rallocx(p1, shrink_size(large_maxclass), 0);
	assert_ptr_eq(p1, p2, "Unexpected move during shrink");

	arena_ralloc_junk_large = arena_ralloc_junk_large_orig;

	assert_ptr_eq(most_recently_trimmed, p1,
	    "Expected trimmed portion of region to be junk-filled");
}
TEST_END

static bool detected_redzone_corruption;

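/*
 * Replacement redzone-corruption hook that merely records that corruption was
 * reported, so test_junk_redzone can assert detection of both underflow and
 * overflow writes.
 */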
static void
arena_redzone_corruption_replacement(void *ptr, size_t usize, bool after,
    size_t offset, uint8_t byte)
{

	detected_redzone_corruption = true;
}

TEST_BEGIN(test_junk_redzone)
{
	char *s;
	arena_redzone_corruption_t *arena_redzone_corruption_orig;

	test_skip_if(!config_fill);
	test_skip_if(!opt_junk_alloc || !opt_junk_free);

	arena_redzone_corruption_orig = arena_redzone_corruption;
	arena_redzone_corruption = arena_redzone_corruption_replacement;

	/* Test underflow. */
	detected_redzone_corruption = false;
	s = (char *)mallocx(1, 0);
	assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");
	s[-1] = 0xbb;
	dallocx(s, 0);
	assert_true(detected_redzone_corruption,
	    "Did not detect redzone corruption");

	/* Test overflow. */
	detected_redzone_corruption = false;
	s = (char *)mallocx(1, 0);
	assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");
	s[sallocx(s, 0)] = 0xbb;
	dallocx(s, 0);
	assert_true(detected_redzone_corruption,
	    "Did not detect redzone corruption");

	arena_redzone_corruption = arena_redzone_corruption_orig;
}
TEST_END

int
main(void)
{

	return (test(
	    test_junk_small,
	    test_junk_large,
	    test_junk_huge,
	    test_junk_large_ralloc_shrink,
	    test_junk_redzone));
}