/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_LINEAR_ALLOC_INL_H_
#define ART_RUNTIME_LINEAR_ALLOC_INL_H_

#include "linear_alloc.h"

#include "base/gc_visited_arena_pool.h"
#include "thread-current-inl.h"

namespace art {

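// Records the bounds of the allocation starting at `begin` with the
// TrackedArena that contains it, so that the GC-visited arena pool can later
// locate the first object in the arena's pages. Only valid when allocation
// tracking is enabled.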
inline void LinearAlloc::SetFirstObject(void* begin, size_t bytes) const {
  DCHECK(track_allocations_);
  if (ArenaAllocator::IsRunningOnMemoryTool()) {
    bytes += ArenaAllocator::kMemoryToolRedZoneBytes;
  }
  uint8_t* end = static_cast<uint8_t*>(begin) + bytes;
  Arena* arena = allocator_.GetHeadArena();
  DCHECK_NE(arena, nullptr);
  // The object would either be in the head arena or the next one.
  if (UNLIKELY(begin < arena->Begin() || begin >= arena->End())) {
    arena = arena->Next();
  }
  DCHECK(begin >= arena->Begin() && end <= arena->End());
  down_cast<TrackedArena*>(arena)->SetFirstObject(static_cast<uint8_t*>(begin), end);
}

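// Resets the current arena after a zygote fork so that subsequent
// allocations start in a fresh arena rather than continuing to dirty pages
// in one shared with the zygote.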
inline void LinearAlloc::SetupForPostZygoteFork(Thread* self) {
  MutexLock mu(self, lock_);
  DCHECK(track_allocations_);
  allocator_.ResetCurrentArena();
}

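// Grows (or shrinks) the allocation at `ptr` from `old_size` to `new_size`
// bytes. When allocations are tracked, each allocation is preceded by a
// TrackingHeader recording its total size and kind; both sizes are adjusted
// below to account for that header.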
inline void* LinearAlloc::Realloc(Thread* self,
                                  void* ptr,
                                  size_t old_size,
                                  size_t new_size,
                                  LinearAllocKind kind) {
  MutexLock mu(self, lock_);
  if (track_allocations_) {
    if (ptr != nullptr) {
      // Realloc cannot be called on a 16-byte-aligned allocation, as Realloc
      // doesn't guarantee that alignment. So the header must be immediately
      // prior to ptr.
      TrackingHeader* header = reinterpret_cast<TrackingHeader*>(ptr) - 1;
      DCHECK_EQ(header->GetKind(), kind);
      old_size += sizeof(TrackingHeader);
      DCHECK_EQ(header->GetSize(), old_size);
      ptr = header;
    } else {
      DCHECK_EQ(old_size, 0u);
    }
    new_size += sizeof(TrackingHeader);
    void* ret = allocator_.Realloc(ptr, old_size, new_size);
    new (ret) TrackingHeader(new_size, kind);
    SetFirstObject(ret, new_size);
    return static_cast<TrackingHeader*>(ret) + 1;
  } else {
    return allocator_.Realloc(ptr, old_size, new_size);
  }
}

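// Allocates `size` bytes. When tracking, a TrackingHeader is placed in front
// of the object and the returned pointer points just past it.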
inline void* LinearAlloc::Alloc(Thread* self, size_t size, LinearAllocKind kind) {
  MutexLock mu(self, lock_);
  if (track_allocations_) {
    size += sizeof(TrackingHeader);
    TrackingHeader* storage = new (allocator_.Alloc(size)) TrackingHeader(size, kind);
    SetFirstObject(storage, size);
    return storage + 1;
  } else {
    return allocator_.Alloc(size);
  }
}

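// Allocates `size` bytes (itself required to be a multiple of 16) and returns
// a 16-byte-aligned pointer. When tracking, the TrackingHeader is placed at
// the start of the block and the payload at the next 16-byte boundary, with
// the padding folded into the tracked size. This is also why Realloc cannot
// be used on 16-byte-aligned allocations: the header is not necessarily
// immediately prior to the payload.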
inline void* LinearAlloc::AllocAlign16(Thread* self, size_t size, LinearAllocKind kind) {
  MutexLock mu(self, lock_);
  DCHECK_ALIGNED(size, 16);
  if (track_allocations_) {
    size_t mem_tool_bytes = ArenaAllocator::IsRunningOnMemoryTool()
                            ? ArenaAllocator::kMemoryToolRedZoneBytes : 0;
    uint8_t* ptr = allocator_.CurrentPtr() + sizeof(TrackingHeader);
    uintptr_t padding =
        RoundUp(reinterpret_cast<uintptr_t>(ptr), 16) - reinterpret_cast<uintptr_t>(ptr);
    DCHECK_LT(padding, 16u);
    size_t required_size = size + sizeof(TrackingHeader) + padding;

    if (allocator_.CurrentArenaUnusedBytes() < required_size + mem_tool_bytes) {
      // The allocator will require a new arena, which is expected to be
      // 16-byte aligned.
      static_assert(ArenaAllocator::kArenaAlignment >= 16,
                    "Expecting sufficient alignment for new Arena.");
      required_size = size + RoundUp(sizeof(TrackingHeader), 16);
    }
    // Using ArenaAllocator's AllocAlign16 here would disturb the alignment by
    // trying to make the header 16-byte aligned. The alignment requirements
    // have already been addressed above; we only need the allocator to bump
    // its pointer.
    ptr = static_cast<uint8_t*>(allocator_.Alloc(required_size));
    new (ptr) TrackingHeader(required_size, kind, /*is_16_aligned=*/true);
    SetFirstObject(ptr, required_size);
    return AlignUp(ptr + sizeof(TrackingHeader), 16);
  } else {
    return allocator_.AllocAlign16(size);
  }
}

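// A minimal usage sketch for the allocation entry points above. This is a
// hypothetical caller: `linear_alloc` and `num_bytes` are assumed, and the
// LinearAllocKind must match what the memory actually stores.
//
//   Thread* self = Thread::Current();
//   void* mem = linear_alloc->Alloc(self, num_bytes, LinearAllocKind::kNoGCRoots);
//   mem = linear_alloc->Realloc(self, mem, num_bytes, 2 * num_bytes,
//                               LinearAllocKind::kNoGCRoots);
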
inline size_t LinearAlloc::GetUsedMemory() const {
  MutexLock mu(Thread::Current(), lock_);
  return allocator_.BytesUsed();
}

inline ArenaPool* LinearAlloc::GetArenaPool() {
  MutexLock mu(Thread::Current(), lock_);
  return allocator_.GetArenaPool();
}

inline bool LinearAlloc::Contains(void* ptr) const {
  MutexLock mu(Thread::Current(), lock_);
  return allocator_.Contains(ptr);
}

}  // namespace art

#endif  // ART_RUNTIME_LINEAR_ALLOC_INL_H_