#include "test/jemalloc_test.h"

#include "jemalloc/internal/util.h"

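/*
 * Saved copies of the junk-fill hooks so they can be restored after the test,
 * plus state for tracking whether a watched region was junk-filled on
 * deallocation.
 */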
static arena_dalloc_junk_small_t *arena_dalloc_junk_small_orig;
static large_dalloc_junk_t *large_dalloc_junk_orig;
static large_dalloc_maybe_junk_t *large_dalloc_maybe_junk_orig;
static void *watch_for_junking;
static bool saw_junking;

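/* Begin watching p; the intercepts below set saw_junking when it is freed. */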
static void
watch_junking(void *p) {
	watch_for_junking = p;
	saw_junking = false;
}

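/*
 * Wrap the small-region deallocation junk hook: after the original hook runs,
 * verify that every byte of the freed region is JEMALLOC_FREE_JUNK, and note
 * whether the watched region was processed.
 */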
static void
arena_dalloc_junk_small_intercept(void *ptr, const bin_info_t *bin_info) {
	size_t i;

	arena_dalloc_junk_small_orig(ptr, bin_info);
	for (i = 0; i < bin_info->reg_size; i++) {
		assert_u_eq(((uint8_t *)ptr)[i], JEMALLOC_FREE_JUNK,
		    "Missing junk fill for byte %zu/%zu of deallocated region",
		    i, bin_info->reg_size);
	}
	if (ptr == watch_for_junking) {
		saw_junking = true;
	}
}

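/* As above, but for the large-region deallocation junk hook. */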
static void
large_dalloc_junk_intercept(void *ptr, size_t usize) {
	size_t i;

	large_dalloc_junk_orig(ptr, usize);
	for (i = 0; i < usize; i++) {
		assert_u_eq(((uint8_t *)ptr)[i], JEMALLOC_FREE_JUNK,
		    "Missing junk fill for byte %zu/%zu of deallocated region",
		    i, usize);
	}
	if (ptr == watch_for_junking) {
		saw_junking = true;
	}
}

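/*
 * The "maybe" hook does not necessarily fill the region, so only record that
 * the watched region reached it; byte-level verification happens in
 * large_dalloc_junk_intercept().
 */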
static void
large_dalloc_maybe_junk_intercept(void *ptr, size_t usize) {
	large_dalloc_maybe_junk_orig(ptr, usize);
	if (ptr == watch_for_junking) {
		saw_junking = true;
	}
}

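/*
 * Grow an allocation one size class at a time from sz_min up through sz_max,
 * verifying that newly allocated bytes are junk-filled (if junk-on-alloc is
 * enabled), that previously written bytes are preserved, and that deallocated
 * regions are junk-filled (if junk-on-free is enabled).
 */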
static void
test_junk(size_t sz_min, size_t sz_max) {
	uint8_t *s;
	size_t sz_prev, sz, i;

	if (opt_junk_free) {
		arena_dalloc_junk_small_orig = arena_dalloc_junk_small;
		arena_dalloc_junk_small = arena_dalloc_junk_small_intercept;
		large_dalloc_junk_orig = large_dalloc_junk;
		large_dalloc_junk = large_dalloc_junk_intercept;
		large_dalloc_maybe_junk_orig = large_dalloc_maybe_junk;
		large_dalloc_maybe_junk = large_dalloc_maybe_junk_intercept;
	}

	sz_prev = 0;
	s = (uint8_t *)mallocx(sz_min, 0);
	assert_ptr_not_null((void *)s, "Unexpected mallocx() failure");

	for (sz = sallocx(s, 0); sz <= sz_max;
	    sz_prev = sz, sz = sallocx(s, 0)) {
		if (sz_prev > 0) {
			assert_u_eq(s[0], 'a',
			    "Previously allocated byte %zu/%zu is corrupted",
			    ZU(0), sz_prev);
			assert_u_eq(s[sz_prev-1], 'a',
			    "Previously allocated byte %zu/%zu is corrupted",
			    sz_prev-1, sz_prev);
		}

		for (i = sz_prev; i < sz; i++) {
			if (opt_junk_alloc) {
				assert_u_eq(s[i], JEMALLOC_ALLOC_JUNK,
				    "Newly allocated byte %zu/%zu isn't "
				    "junk-filled", i, sz);
			}
			s[i] = 'a';
		}

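		/*
		 * Only exercise junking via rallocx() when in-place expansion
		 * to sz+1 fails (xallocx() reports the size is still sz), so
		 * that rallocx() must move the object and deallocate the old
		 * region.
		 */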
		if (xallocx(s, sz+1, 0, 0) == sz) {
			uint8_t *t;
			watch_junking(s);
			t = (uint8_t *)rallocx(s, sz+1, 0);
			assert_ptr_not_null((void *)t,
			    "Unexpected rallocx() failure");
			assert_zu_ge(sallocx(t, 0), sz+1,
			    "Unexpectedly small rallocx() result");
			if (!background_thread_enabled()) {
				assert_ptr_ne(s, t,
				    "Unexpected in-place rallocx()");
				assert_true(!opt_junk_free || saw_junking,
				    "Expected region of size %zu to be "
				    "junk-filled", sz);
			}
			s = t;
		}
	}

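	/* Deallocation should junk-fill the region (when opt_junk_free). */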
	watch_junking(s);
	dallocx(s, 0);
	assert_true(!opt_junk_free || saw_junking,
	    "Expected region of size %zu to be junk-filled", sz);

	if (opt_junk_free) {
		arena_dalloc_junk_small = arena_dalloc_junk_small_orig;
		large_dalloc_junk = large_dalloc_junk_orig;
		large_dalloc_maybe_junk = large_dalloc_maybe_junk_orig;
	}
}

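/* Sweep the small size classes. */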
TEST_BEGIN(test_junk_small) {
	test_skip_if(!config_fill);
	test_junk(1, SMALL_MAXCLASS-1);
}
TEST_END

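/* Sweep large sizes, from just past SMALL_MAXCLASS to twice LARGE_MINCLASS. */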
TEST_BEGIN(test_junk_large) {
	test_skip_if(!config_fill);
	test_junk(SMALL_MAXCLASS+1, (1U << (LG_LARGE_MINCLASS+1)));
}
TEST_END

int
main(void) {
	return test(
	    test_junk_small,
	    test_junk_large);
}