1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ART_RUNTIME_GC_SPACE_MEMORY_TOOL_MALLOC_SPACE_INL_H_
18 #define ART_RUNTIME_GC_SPACE_MEMORY_TOOL_MALLOC_SPACE_INL_H_
19
#include "memory_tool_malloc_space.h"

#include <algorithm>
#include <limits>

#include "base/memory_tool.h"
#include "memory_tool_settings.h"
#include "mirror/object-inl.h"
25
26 namespace art {
27 namespace gc {
28 namespace space {
29
30 namespace memory_tool_details {
31
// Given a raw allocation that contains a redzone of kMemoryToolRedZoneBytes on
// each side of the payload, poison both redzones, mark the payload DEFINED, and
// report the adjusted accounting numbers through the out-parameters.
// Returns a pointer to the payload (just past the left redzone).
template <size_t kMemoryToolRedZoneBytes, bool kUseObjSizeForUsable>
inline mirror::Object* AdjustForMemoryTool(void* obj_with_rdz,
                                           size_t num_bytes,
                                           size_t bytes_allocated,
                                           size_t usable_size,
                                           size_t bytes_tl_bulk_allocated,
                                           size_t* bytes_allocated_out,
                                           size_t* usable_size_out,
                                           size_t* bytes_tl_bulk_allocated_out) {
  // All out-parameters are optional; callers pass nullptr for values they do
  // not need.
  if (bytes_allocated_out != nullptr) {
    *bytes_allocated_out = bytes_allocated;
  }
  if (bytes_tl_bulk_allocated_out != nullptr) {
    *bytes_tl_bulk_allocated_out = bytes_tl_bulk_allocated;
  }

  // This cuts over-provision and is a trade-off between testing the over-provisioning code paths
  // vs checking overflows in the regular paths.
  if (usable_size_out != nullptr) {
    if (kUseObjSizeForUsable) {
      *usable_size_out = num_bytes;
    } else {
      // NOTE(review): assumes usable_size >= 2 * kMemoryToolRedZoneBytes,
      // which holds because the request was padded by exactly that amount.
      *usable_size_out = usable_size - 2 * kMemoryToolRedZoneBytes;
    }
  }

  // Left redzone.
  MEMORY_TOOL_MAKE_NOACCESS(obj_with_rdz, kMemoryToolRedZoneBytes);

  // Make requested memory readable.
  // (If the allocator assumes memory is zeroed out, we might get UNDEFINED warnings, so make
  // everything DEFINED initially.)
  mirror::Object* result = reinterpret_cast<mirror::Object*>(
      reinterpret_cast<uint8_t*>(obj_with_rdz) + kMemoryToolRedZoneBytes);
  MEMORY_TOOL_MAKE_DEFINED(result, num_bytes);

  // Right redzone. Assumes that if bytes_allocated > usable_size, then the difference is
  // management data at the upper end, and for simplicity we will not protect that.
  // At the moment, this fits RosAlloc (no management data in a slot, usable_size == alloc_size)
  // and DlMalloc (allocation_size = (usable_size == num_bytes) + 4, 4 is management)
  MEMORY_TOOL_MAKE_NOACCESS(reinterpret_cast<uint8_t*>(result) + num_bytes,
                            usable_size - (num_bytes + kMemoryToolRedZoneBytes));

  return result;
}
77
GetObjSizeNoThreadSafety(mirror::Object * obj)78 inline size_t GetObjSizeNoThreadSafety(mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS {
79 return obj->SizeOf<kVerifyNone>();
80 }
81
82 } // namespace memory_tool_details
83
84 template <typename S,
85 size_t kMemoryToolRedZoneBytes,
86 bool kAdjustForRedzoneInAllocSize,
87 bool kUseObjSizeForUsable>
88 mirror::Object*
89 MemoryToolMallocSpace<S,
90 kMemoryToolRedZoneBytes,
91 kAdjustForRedzoneInAllocSize,
AllocWithGrowth(Thread * self,size_t num_bytes,size_t * bytes_allocated_out,size_t * usable_size_out,size_t * bytes_tl_bulk_allocated_out)92 kUseObjSizeForUsable>::AllocWithGrowth(
93 Thread* self,
94 size_t num_bytes,
95 size_t* bytes_allocated_out,
96 size_t* usable_size_out,
97 size_t* bytes_tl_bulk_allocated_out) {
98 size_t bytes_allocated;
99 size_t usable_size;
100 size_t bytes_tl_bulk_allocated;
101 void* obj_with_rdz = S::AllocWithGrowth(self,
102 num_bytes + 2 * kMemoryToolRedZoneBytes,
103 &bytes_allocated,
104 &usable_size,
105 &bytes_tl_bulk_allocated);
106 if (obj_with_rdz == nullptr) {
107 return nullptr;
108 }
109
110 return memory_tool_details::AdjustForMemoryTool<kMemoryToolRedZoneBytes, kUseObjSizeForUsable>(
111 obj_with_rdz,
112 num_bytes,
113 bytes_allocated,
114 usable_size,
115 bytes_tl_bulk_allocated,
116 bytes_allocated_out,
117 usable_size_out,
118 bytes_tl_bulk_allocated_out);
119 }
120
121 template <typename S,
122 size_t kMemoryToolRedZoneBytes,
123 bool kAdjustForRedzoneInAllocSize,
124 bool kUseObjSizeForUsable>
125 mirror::Object* MemoryToolMallocSpace<S,
126 kMemoryToolRedZoneBytes,
127 kAdjustForRedzoneInAllocSize,
Alloc(Thread * self,size_t num_bytes,size_t * bytes_allocated_out,size_t * usable_size_out,size_t * bytes_tl_bulk_allocated_out)128 kUseObjSizeForUsable>::Alloc(
129 Thread* self,
130 size_t num_bytes,
131 size_t* bytes_allocated_out,
132 size_t* usable_size_out,
133 size_t* bytes_tl_bulk_allocated_out) {
134 size_t bytes_allocated;
135 size_t usable_size;
136 size_t bytes_tl_bulk_allocated;
137 void* obj_with_rdz = S::Alloc(self,
138 num_bytes + 2 * kMemoryToolRedZoneBytes,
139 &bytes_allocated,
140 &usable_size,
141 &bytes_tl_bulk_allocated);
142 if (obj_with_rdz == nullptr) {
143 return nullptr;
144 }
145
146 return memory_tool_details::AdjustForMemoryTool<kMemoryToolRedZoneBytes, kUseObjSizeForUsable>(
147 obj_with_rdz,
148 num_bytes,
149 bytes_allocated,
150 usable_size,
151 bytes_tl_bulk_allocated,
152 bytes_allocated_out,
153 usable_size_out,
154 bytes_tl_bulk_allocated_out);
155 }
156
157 template <typename S,
158 size_t kMemoryToolRedZoneBytes,
159 bool kAdjustForRedzoneInAllocSize,
160 bool kUseObjSizeForUsable>
161 mirror::Object* MemoryToolMallocSpace<S,
162 kMemoryToolRedZoneBytes,
163 kAdjustForRedzoneInAllocSize,
AllocThreadUnsafe(Thread * self,size_t num_bytes,size_t * bytes_allocated_out,size_t * usable_size_out,size_t * bytes_tl_bulk_allocated_out)164 kUseObjSizeForUsable>::AllocThreadUnsafe(
165 Thread* self,
166 size_t num_bytes,
167 size_t* bytes_allocated_out,
168 size_t* usable_size_out,
169 size_t* bytes_tl_bulk_allocated_out) {
170 size_t bytes_allocated;
171 size_t usable_size;
172 size_t bytes_tl_bulk_allocated;
173 void* obj_with_rdz = S::AllocThreadUnsafe(self,
174 num_bytes + 2 * kMemoryToolRedZoneBytes,
175 &bytes_allocated,
176 &usable_size,
177 &bytes_tl_bulk_allocated);
178 if (obj_with_rdz == nullptr) {
179 return nullptr;
180 }
181
182 return memory_tool_details::AdjustForMemoryTool<kMemoryToolRedZoneBytes, kUseObjSizeForUsable>(
183 obj_with_rdz,
184 num_bytes,
185 bytes_allocated,
186 usable_size,
187 bytes_tl_bulk_allocated,
188 bytes_allocated_out,
189 usable_size_out,
190 bytes_tl_bulk_allocated_out);
191 }
192
193 template <typename S,
194 size_t kMemoryToolRedZoneBytes,
195 bool kAdjustForRedzoneInAllocSize,
196 bool kUseObjSizeForUsable>
197 size_t MemoryToolMallocSpace<S,
198 kMemoryToolRedZoneBytes,
199 kAdjustForRedzoneInAllocSize,
AllocationSize(mirror::Object * obj,size_t * usable_size)200 kUseObjSizeForUsable>::AllocationSize(
201 mirror::Object* obj, size_t* usable_size) {
202 size_t result = S::AllocationSize(
203 reinterpret_cast<mirror::Object*>(
204 reinterpret_cast<uint8_t*>(obj)
205 - (kAdjustForRedzoneInAllocSize ? kMemoryToolRedZoneBytes : 0)),
206 usable_size);
207 if (usable_size != nullptr) {
208 if (kUseObjSizeForUsable) {
209 *usable_size = memory_tool_details::GetObjSizeNoThreadSafety(obj);
210 } else {
211 *usable_size = *usable_size - 2 * kMemoryToolRedZoneBytes;
212 }
213 }
214 return result;
215 }
216
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
// Frees |ptr| (a payload pointer): unpoisons the whole allocation, then
// forwards the redzone-inclusive pointer to the underlying space. Returns the
// number of bytes freed as reported by S::Free.
size_t MemoryToolMallocSpace<S,
                             kMemoryToolRedZoneBytes,
                             kAdjustForRedzoneInAllocSize,
                             kUseObjSizeForUsable>::Free(Thread* self, mirror::Object* ptr) {
  // |ptr| points just past the left redzone; the real allocation starts one
  // redzone earlier.
  void* obj_after_rdz = reinterpret_cast<void*>(ptr);
  uint8_t* obj_with_rdz = reinterpret_cast<uint8_t*>(obj_after_rdz) - kMemoryToolRedZoneBytes;

  // Make redzones undefined.
  // Must query the sizes before unprotecting, while the allocation is intact.
  size_t usable_size;
  size_t allocation_size = AllocationSize(ptr, &usable_size);

  // Unprotect the allocation.
  // Use the obj-size-for-usable flag to determine whether usable_size is the more important one,
  // e.g., whether there's data in the allocation_size (and usable_size can't be trusted).
  if (kUseObjSizeForUsable) {
    MEMORY_TOOL_MAKE_UNDEFINED(obj_with_rdz, allocation_size);
  } else {
    // usable_size came back redzone-stripped from AllocationSize, so add both
    // redzones back to cover the full allocation.
    MEMORY_TOOL_MAKE_UNDEFINED(obj_with_rdz, usable_size + 2 * kMemoryToolRedZoneBytes);
  }

  return S::Free(self, reinterpret_cast<mirror::Object*>(obj_with_rdz));
}
243
244 template <typename S,
245 size_t kMemoryToolRedZoneBytes,
246 bool kAdjustForRedzoneInAllocSize,
247 bool kUseObjSizeForUsable>
248 size_t MemoryToolMallocSpace<S,
249 kMemoryToolRedZoneBytes,
250 kAdjustForRedzoneInAllocSize,
FreeList(Thread * self,size_t num_ptrs,mirror::Object ** ptrs)251 kUseObjSizeForUsable>::FreeList(
252 Thread* self, size_t num_ptrs, mirror::Object** ptrs) {
253 size_t freed = 0;
254 // Sort the pointers to free non class objects first. See b/131542326 for why this is necessary to
255 // avoid crashes.
256 std::sort(ptrs, ptrs + num_ptrs, [](mirror::Object* a, mirror::Object* b)
257 REQUIRES_SHARED(Locks::mutator_lock_) {
258 return a->IsClass() < b->IsClass();
259 });
260 for (size_t i = 0; i < num_ptrs; i++) {
261 freed += Free(self, ptrs[i]);
262 ptrs[i] = nullptr;
263 }
264 return freed;
265 }
266
template <typename S,
          size_t kMemoryToolRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
template <typename... Params>
// Forwarding constructor: hands the mem map and any extra allocator arguments
// straight to the underlying space S.
MemoryToolMallocSpace<S,
                      kMemoryToolRedZoneBytes,
                      kAdjustForRedzoneInAllocSize,
                      kUseObjSizeForUsable>::MemoryToolMallocSpace(
    MemMap&& mem_map, size_t initial_size, Params... params)
    : S(std::move(mem_map), initial_size, params...) {
  // Don't want to change the memory tool states of the mem map here as the allocator is already
  // initialized at this point and that may interfere with what the allocator does internally. Note
  // that the tail beyond the initial size is mprotected.
}
282
283 template <typename S,
284 size_t kMemoryToolRedZoneBytes,
285 bool kAdjustForRedzoneInAllocSize,
286 bool kUseObjSizeForUsable>
287 size_t MemoryToolMallocSpace<S,
288 kMemoryToolRedZoneBytes,
289 kAdjustForRedzoneInAllocSize,
MaxBytesBulkAllocatedFor(size_t num_bytes)290 kUseObjSizeForUsable>::MaxBytesBulkAllocatedFor(size_t num_bytes) {
291 return S::MaxBytesBulkAllocatedFor(num_bytes + 2 * kMemoryToolRedZoneBytes);
292 }
293
294 } // namespace space
295 } // namespace gc
296 } // namespace art
297
298 #endif // ART_RUNTIME_GC_SPACE_MEMORY_TOOL_MALLOC_SPACE_INL_H_
299