// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/cppgc/page-memory.h"

#include "src/base/macros.h"
#include "src/heap/cppgc/sanitizers.h"

namespace cppgc {
namespace internal {

namespace {

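// Makes the writeable region of |page_memory| accessible. When guard pages
// cannot be committed separately, the whole reservation is committed instead.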
void Unprotect(PageAllocator* allocator, const PageMemory& page_memory) {
  if (SupportsCommittingGuardPages(allocator)) {
    CHECK(allocator->SetPermissions(page_memory.writeable_region().base(),
                                    page_memory.writeable_region().size(),
                                    PageAllocator::Permission::kReadWrite));
  } else {
    // The allocator does not support committing guard pages separately, so no
    // guard-page protection is used and the whole reservation is committed as
    // read/write.
    //
    // The allocator needs to support committing the overall range.
    CHECK_EQ(0u,
             page_memory.overall_region().size() % allocator->CommitPageSize());
    CHECK(allocator->SetPermissions(page_memory.overall_region().base(),
                                    page_memory.overall_region().size(),
                                    PageAllocator::Permission::kReadWrite));
  }
}

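// Revokes access to |page_memory|. Only the writeable region is protected when
// guard pages can be committed separately; otherwise the whole reservation is
// made inaccessible.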
void Protect(PageAllocator* allocator, const PageMemory& page_memory) {
  if (SupportsCommittingGuardPages(allocator)) {
    // Swap the same region, providing the OS with a chance for fast lookup and
    // change.
    CHECK(allocator->SetPermissions(page_memory.writeable_region().base(),
                                    page_memory.writeable_region().size(),
                                    PageAllocator::Permission::kNoAccess));
  } else {
    // See Unprotect().
    CHECK_EQ(0u,
             page_memory.overall_region().size() % allocator->CommitPageSize());
    CHECK(allocator->SetPermissions(page_memory.overall_region().base(),
                                    page_memory.overall_region().size(),
                                    PageAllocator::Permission::kNoAccess));
  }
}

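// Reserves an inaccessible memory region of |allocation_size| bytes, aligned
// to kPageSize.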
MemoryRegion ReserveMemoryRegion(PageAllocator* allocator,
                                 size_t allocation_size) {
  void* region_memory =
      allocator->AllocatePages(nullptr, allocation_size, kPageSize,
                               PageAllocator::Permission::kNoAccess);
  const MemoryRegion reserved_region(static_cast<Address>(region_memory),
                                     allocation_size);
  DCHECK_EQ(reserved_region.base() + allocation_size, reserved_region.end());
  return reserved_region;
}

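// Returns a reserved memory region to the operating system.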
void FreeMemoryRegion(PageAllocator* allocator,
                      const MemoryRegion& reserved_region) {
  // Make sure pages returned to OS are unpoisoned.
  ASAN_UNPOISON_MEMORY_REGION(reserved_region.base(), reserved_region.size());
  allocator->FreePages(reserved_region.base(), reserved_region.size());
}

}  // namespace

PageMemoryRegion::PageMemoryRegion(PageAllocator* allocator,
                                   MemoryRegion reserved_region, bool is_large)
    : allocator_(allocator),
      reserved_region_(reserved_region),
      is_large_(is_large) {}

PageMemoryRegion::~PageMemoryRegion() {
  FreeMemoryRegion(allocator_, reserved_region());
}

// static
constexpr size_t NormalPageMemoryRegion::kNumPageRegions;

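// Reserves one contiguous region that backs kNumPageRegions normal pages; all
// page slots start out unused and inaccessible.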
NormalPageMemoryRegion::NormalPageMemoryRegion(PageAllocator* allocator)
    : PageMemoryRegion(allocator,
                       ReserveMemoryRegion(
                           allocator, RoundUp(kPageSize * kNumPageRegions,
                                              allocator->AllocatePageSize())),
                       false) {
#ifdef DEBUG
  for (size_t i = 0; i < kNumPageRegions; ++i) {
    DCHECK_EQ(false, page_memories_in_use_[i]);
  }
#endif  // DEBUG
}

NormalPageMemoryRegion::~NormalPageMemoryRegion() = default;

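// Marks the page slot containing |writeable_base| as used and makes it
// accessible.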
void NormalPageMemoryRegion::Allocate(Address writeable_base) {
  const size_t index = GetIndex(writeable_base);
  ChangeUsed(index, true);
  Unprotect(allocator_, GetPageMemory(index));
}

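// Marks the page slot containing |writeable_base| as unused and protects it
// again.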
void NormalPageMemoryRegion::Free(Address writeable_base) {
  const size_t index = GetIndex(writeable_base);
  ChangeUsed(index, false);
  Protect(allocator_, GetPageMemory(index));
}

void NormalPageMemoryRegion::UnprotectForTesting() {
  for (size_t i = 0; i < kNumPageRegions; ++i) {
    Unprotect(allocator_, GetPageMemory(i));
  }
}

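// Reserves a dedicated region for a single large page: |length| payload bytes
// surrounded by two guard pages.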
LargePageMemoryRegion::LargePageMemoryRegion(PageAllocator* allocator,
                                             size_t length)
    : PageMemoryRegion(allocator,
                       ReserveMemoryRegion(
                           allocator, RoundUp(length + 2 * kGuardPageSize,
                                              allocator->AllocatePageSize())),
                       true) {}

LargePageMemoryRegion::~LargePageMemoryRegion() = default;

void LargePageMemoryRegion::UnprotectForTesting() {
  Unprotect(allocator_, GetPageMemory());
}

PageMemoryRegionTree::PageMemoryRegionTree() = default;

PageMemoryRegionTree::~PageMemoryRegionTree() = default;

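// Registers |region| under the base address of its reservation. A region must
// only be added once.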
void PageMemoryRegionTree::Add(PageMemoryRegion* region) {
  DCHECK(region);
  auto result = set_.emplace(region->reserved_region().base(), region);
  USE(result);
  DCHECK(result.second);
}

void PageMemoryRegionTree::Remove(PageMemoryRegion* region) {
  DCHECK(region);
  auto size = set_.erase(region->reserved_region().base());
  USE(size);
  DCHECK_EQ(1u, size);
}

NormalPageMemoryPool::NormalPageMemoryPool() = default;

NormalPageMemoryPool::~NormalPageMemoryPool() = default;

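// Makes the page slot at |writeable_base|, owned by |pmr|, available for reuse
// in |bucket|.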
void NormalPageMemoryPool::Add(size_t bucket, NormalPageMemoryRegion* pmr,
                               Address writeable_base) {
  DCHECK_LT(bucket, kNumPoolBuckets);
  pool_[bucket].push_back(std::make_pair(pmr, writeable_base));
}

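// Removes and returns a pooled page slot from |bucket|, or {nullptr, nullptr}
// if the bucket is empty.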
std::pair<NormalPageMemoryRegion*, Address> NormalPageMemoryPool::Take(
    size_t bucket) {
  DCHECK_LT(bucket, kNumPoolBuckets);
  if (pool_[bucket].empty()) return {nullptr, nullptr};
  std::pair<NormalPageMemoryRegion*, Address> pair = pool_[bucket].back();
  pool_[bucket].pop_back();
  return pair;
}

PageBackend::PageBackend(PageAllocator* allocator) : allocator_(allocator) {}

PageBackend::~PageBackend() = default;

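// Returns writeable memory for a normal page in |bucket|. If the pool is
// empty, a new NormalPageMemoryRegion is reserved, all of its page slots are
// added to the pool, and the allocation is retried.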
Address PageBackend::AllocateNormalPageMemory(size_t bucket) {
  std::pair<NormalPageMemoryRegion*, Address> result = page_pool_.Take(bucket);
  if (!result.first) {
    auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator_);
    for (size_t i = 0; i < NormalPageMemoryRegion::kNumPageRegions; ++i) {
      page_pool_.Add(bucket, pmr.get(),
                     pmr->GetPageMemory(i).writeable_region().base());
    }
    page_memory_region_tree_.Add(pmr.get());
    normal_page_memory_regions_.push_back(std::move(pmr));
    return AllocateNormalPageMemory(bucket);
  }
  result.first->Allocate(result.second);
  return result.second;
}

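// Protects the normal page at |writeable_base| and returns it to the pool for
// reuse.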
void PageBackend::FreeNormalPageMemory(size_t bucket, Address writeable_base) {
  auto* pmr = static_cast<NormalPageMemoryRegion*>(
      page_memory_region_tree_.Lookup(writeable_base));
  pmr->Free(writeable_base);
  page_pool_.Add(bucket, pmr, writeable_base);
}

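// Reserves a dedicated region for a large page of |size| bytes, makes its
// writeable region accessible, and returns that region's base address.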
Address PageBackend::AllocateLargePageMemory(size_t size) {
  auto pmr = std::make_unique<LargePageMemoryRegion>(allocator_, size);
  const PageMemory pm = pmr->GetPageMemory();
  Unprotect(allocator_, pm);
  page_memory_region_tree_.Add(pmr.get());
  large_page_memory_regions_.insert(std::make_pair(pmr.get(), std::move(pmr)));
  return pm.writeable_region().base();
}

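// Destroys the large page region backing |writeable_base| and returns its
// reservation to the operating system.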
void PageBackend::FreeLargePageMemory(Address writeable_base) {
  PageMemoryRegion* pmr = page_memory_region_tree_.Lookup(writeable_base);
  page_memory_region_tree_.Remove(pmr);
  auto size = large_page_memory_regions_.erase(pmr);
  USE(size);
  DCHECK_EQ(1u, size);
}

}  // namespace internal
}  // namespace cppgc