// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/cppgc/heap-page.h"

#include <algorithm>

#include "include/cppgc/internal/api-constants.h"
#include "src/base/logging.h"
#include "src/base/platform/mutex.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-space.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/memory.h"
#include "src/heap/cppgc/object-start-bitmap.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/stats-collector.h"

namespace cppgc {
namespace internal {

namespace {

Address AlignAddress(Address address, size_t alignment) {
  return reinterpret_cast<Address>(
      RoundUp(reinterpret_cast<uintptr_t>(address), alignment));
}
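
// Illustrative example (hypothetical values): with an alignment of 8,
//   AlignAddress(reinterpret_cast<Address>(0x1003), 8)
// yields 0x1008, while an already aligned address is returned unchanged.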

}  // namespace

// static
BasePage* BasePage::FromInnerAddress(const HeapBase* heap, void* address) {
  return const_cast<BasePage*>(
      FromInnerAddress(heap, const_cast<const void*>(address)));
}

// static
const BasePage* BasePage::FromInnerAddress(const HeapBase* heap,
                                           const void* address) {
  return reinterpret_cast<const BasePage*>(
      heap->page_backend()->Lookup(static_cast<ConstAddress>(address)));
}
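
// Usage sketch (|heap| and |inner_ptr| are hypothetical): any pointer into a
// page's payload maps back to its page metadata via the backend lookup, e.g.
//   BasePage* page = BasePage::FromInnerAddress(&heap, inner_ptr);
//   HeapObjectHeader* header =
//       page->TryObjectHeaderFromInnerAddress(inner_ptr);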

// static
void BasePage::Destroy(BasePage* page) {
  if (page->discarded_memory()) {
    page->space()
        .raw_heap()
        ->heap()
        ->stats_collector()
        ->DecrementDiscardedMemory(page->discarded_memory());
  }
  if (page->is_large()) {
    LargePage::Destroy(LargePage::From(page));
  } else {
    NormalPage::Destroy(NormalPage::From(page));
  }
}

Address BasePage::PayloadStart() {
  return is_large() ? LargePage::From(this)->PayloadStart()
                    : NormalPage::From(this)->PayloadStart();
}

ConstAddress BasePage::PayloadStart() const {
  return const_cast<BasePage*>(this)->PayloadStart();
}

Address BasePage::PayloadEnd() {
  return is_large() ? LargePage::From(this)->PayloadEnd()
                    : NormalPage::From(this)->PayloadEnd();
}

ConstAddress BasePage::PayloadEnd() const {
  return const_cast<BasePage*>(this)->PayloadEnd();
}

size_t BasePage::AllocatedBytesAtLastGC() const {
  return is_large() ? LargePage::From(this)->AllocatedBytesAtLastGC()
                    : NormalPage::From(this)->AllocatedBytesAtLastGC();
}

HeapObjectHeader* BasePage::TryObjectHeaderFromInnerAddress(
    void* address) const {
  return const_cast<HeapObjectHeader*>(
      TryObjectHeaderFromInnerAddress(const_cast<const void*>(address)));
}

const HeapObjectHeader* BasePage::TryObjectHeaderFromInnerAddress(
    const void* address) const {
  if (is_large()) {
    if (!LargePage::From(this)->PayloadContains(
            static_cast<ConstAddress>(address)))
      return nullptr;
  } else {
    const NormalPage* normal_page = NormalPage::From(this);
    if (!normal_page->PayloadContains(static_cast<ConstAddress>(address)))
      return nullptr;
    // Check that the space has no linear allocation buffer.
    DCHECK(!NormalPageSpace::From(normal_page->space())
                .linear_allocation_buffer()
                .size());
  }

  // |address| is on the heap, so the object header can be looked up from the
  // inner address.
  const HeapObjectHeader* header =
      ObjectHeaderFromInnerAddressImpl(this, address);
  if (header->IsFree()) return nullptr;
  DCHECK_NE(kFreeListGCInfoIndex, header->GetGCInfoIndex());
  return header;
}
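
// Callers that probe arbitrary addresses, e.g. conservative stack scanning,
// rely on the nullptr result for non-object addresses (illustrative use):
//   if (auto* header = page->TryObjectHeaderFromInnerAddress(ptr)) {
//     /* |ptr| points into a live, allocated object. */
//   }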

BasePage::BasePage(HeapBase& heap, BaseSpace& space, PageType type)
    : heap_(heap), space_(space), type_(type) {
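  // Page regions are aligned to kPageSize and start with a guard page, so the
  // header at |this| must sit exactly kGuardPageSize past a page boundary,
  // which is what the first DCHECK below verifies.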
  DCHECK_EQ(0u, (reinterpret_cast<uintptr_t>(this) - kGuardPageSize) &
                    kPageOffsetMask);
  DCHECK_EQ(&heap_.raw_heap(), space_.raw_heap());
}

// static
NormalPage* NormalPage::Create(PageBackend& page_backend,
                               NormalPageSpace& space) {
  void* memory = page_backend.AllocateNormalPageMemory(space.index());
  auto* normal_page = new (memory) NormalPage(*space.raw_heap()->heap(), space);
  normal_page->SynchronizedStore();
  normal_page->heap().stats_collector()->NotifyAllocatedMemory(kPageSize);
  // Memory is zero initialized as
  // a) memory retrieved from the OS is zeroed;
  // b) memory retrieved from the page pool was swept and thus is zeroed except
  //    for the first header which will anyway serve as a header again.
  //
  // The following is a subset of SetMemoryInaccessible() to establish the
  // invariant that memory is in the same state as it would be after sweeping.
  // This allows newly allocated pages to go into the LAB and back into the
  // free list.
  Address begin = normal_page->PayloadStart() + sizeof(HeapObjectHeader);
  const size_t size = normal_page->PayloadSize() - sizeof(HeapObjectHeader);
#if defined(V8_USE_MEMORY_SANITIZER)
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(begin, size);
#elif defined(V8_USE_ADDRESS_SANITIZER)
  ASAN_POISON_MEMORY_REGION(begin, size);
#elif DEBUG
  cppgc::internal::ZapMemory(begin, size);
#endif  // Release builds leave the memory untouched.
  CheckMemoryIsInaccessible(begin, size);
  return normal_page;
}

// static
void NormalPage::Destroy(NormalPage* page) {
  DCHECK(page);
  const BaseSpace& space = page->space();
  DCHECK_EQ(space.end(), std::find(space.begin(), space.end(), page));
  page->~NormalPage();
  PageBackend* backend = page->heap().page_backend();
  page->heap().stats_collector()->NotifyFreedMemory(kPageSize);
  backend->FreeNormalPageMemory(space.index(), reinterpret_cast<Address>(page));
}

NormalPage::NormalPage(HeapBase& heap, BaseSpace& space)
    : BasePage(heap, space, PageType::kNormal),
      object_start_bitmap_(PayloadStart()) {
  DCHECK_LT(kLargeObjectSizeThreshold,
            static_cast<size_t>(PayloadEnd() - PayloadStart()));
}

NormalPage::~NormalPage() = default;

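// Iteration walks objects header-by-header; the linear allocation buffer
// (LAB) bounds are passed along so the iterator can skip over the LAB range,
// which contains no valid object headers.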
NormalPage::iterator NormalPage::begin() {
  const auto& lab = NormalPageSpace::From(space()).linear_allocation_buffer();
  return iterator(reinterpret_cast<HeapObjectHeader*>(PayloadStart()),
                  lab.start(), lab.size());
}

NormalPage::const_iterator NormalPage::begin() const {
  const auto& lab = NormalPageSpace::From(space()).linear_allocation_buffer();
  return const_iterator(
      reinterpret_cast<const HeapObjectHeader*>(PayloadStart()), lab.start(),
      lab.size());
}

Address NormalPage::PayloadStart() {
  return AlignAddress((reinterpret_cast<Address>(this + 1)),
                      kAllocationGranularity);
}

ConstAddress NormalPage::PayloadStart() const {
  return const_cast<NormalPage*>(this)->PayloadStart();
}

Address NormalPage::PayloadEnd() { return PayloadStart() + PayloadSize(); }

ConstAddress NormalPage::PayloadEnd() const {
  return const_cast<NormalPage*>(this)->PayloadEnd();
}

// static
size_t NormalPage::PayloadSize() {
  const size_t header_size =
      RoundUp(sizeof(NormalPage), kAllocationGranularity);
  return kPageSize - 2 * kGuardPageSize - header_size;
}
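
// Illustrative arithmetic (constants assumed for exposition): with a 128 KiB
// kPageSize, 4 KiB guard pages on either end, and a header rounded up to,
// say, 256 bytes, the payload spans 131072 - 2 * 4096 - 256 = 122624 bytes.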

LargePage::LargePage(HeapBase& heap, BaseSpace& space, size_t size)
    : BasePage(heap, space, PageType::kLarge), payload_size_(size) {}

LargePage::~LargePage() = default;

// static
size_t LargePage::AllocationSize(size_t payload_size) {
  return PageHeaderSize() + payload_size;
}
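
// Illustrative arithmetic (header size assumed for exposition): a 1 MiB
// payload with a 4 KiB page header requires 1048576 + 4096 = 1052672 bytes.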

// static
LargePage* LargePage::Create(PageBackend& page_backend, LargePageSpace& space,
                             size_t size) {
  // Ensure that the API-provided alignment guarantees do not violate the
  // internally guaranteed alignment of large page allocations.
  STATIC_ASSERT(kGuaranteedObjectAlignment <=
                api_constants::kMaxSupportedAlignment);
  STATIC_ASSERT(
      api_constants::kMaxSupportedAlignment % kGuaranteedObjectAlignment == 0);

  DCHECK_LE(kLargeObjectSizeThreshold, size);
  const size_t allocation_size = AllocationSize(size);

  auto* heap = space.raw_heap()->heap();
  void* memory = page_backend.AllocateLargePageMemory(allocation_size);
  LargePage* page = new (memory) LargePage(*heap, space, size);
  page->SynchronizedStore();
  page->heap().stats_collector()->NotifyAllocatedMemory(allocation_size);
  return page;
}

// static
void LargePage::Destroy(LargePage* page) {
  DCHECK(page);
#if DEBUG
  const BaseSpace& space = page->space();
  {
    // Destroy() happens on the mutator but another concurrent sweeper task may
    // add a live object using `BaseSpace::AddPage()` while iterating the
    // pages.
    v8::base::LockGuard<v8::base::Mutex> guard(&space.pages_mutex());
    DCHECK_EQ(space.end(), std::find(space.begin(), space.end(), page));
  }
#endif  // DEBUG
  page->~LargePage();
  PageBackend* backend = page->heap().page_backend();
  page->heap().stats_collector()->NotifyFreedMemory(
      AllocationSize(page->PayloadSize()));
  backend->FreeLargePageMemory(reinterpret_cast<Address>(page));
}

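// A large page hosts exactly one object, whose header is placed at the very
// start of the payload.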
HeapObjectHeader* LargePage::ObjectHeader() {
  return reinterpret_cast<HeapObjectHeader*>(PayloadStart());
}

const HeapObjectHeader* LargePage::ObjectHeader() const {
  return reinterpret_cast<const HeapObjectHeader*>(PayloadStart());
}

Address LargePage::PayloadStart() {
  return reinterpret_cast<Address>(this) + PageHeaderSize();
}

ConstAddress LargePage::PayloadStart() const {
  return const_cast<LargePage*>(this)->PayloadStart();
}

Address LargePage::PayloadEnd() { return PayloadStart() + PayloadSize(); }

ConstAddress LargePage::PayloadEnd() const {
  return const_cast<LargePage*>(this)->PayloadEnd();
}

}  // namespace internal
}  // namespace cppgc