// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_PAGE_MEMORY_H_
#define V8_HEAP_CPPGC_PAGE_MEMORY_H_

#include <array>
#include <map>
#include <memory>
#include <unordered_map>
#include <vector>

#include "include/cppgc/platform.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/globals.h"

namespace cppgc {
namespace internal {

class V8_EXPORT_PRIVATE MemoryRegion final {
 public:
  MemoryRegion() = default;
  MemoryRegion(Address base, size_t size) : base_(base), size_(size) {
    DCHECK(base);
    DCHECK_LT(0u, size);
  }

  Address base() const { return base_; }
  size_t size() const { return size_; }
  Address end() const { return base_ + size_; }

  bool Contains(ConstAddress addr) const {
    return (reinterpret_cast<uintptr_t>(addr) -
            reinterpret_cast<uintptr_t>(base_)) < size_;
  }

  bool Contains(const MemoryRegion& other) const {
    return base_ <= other.base() && other.end() <= end();
  }

 private:
  Address base_ = nullptr;
  size_t size_ = 0;
};
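
// Example (illustrative only): a MemoryRegion covers the half-open interval
// [base(), end()), i.e. for MemoryRegion r(base, kPageSize), r.Contains(base)
// is true while r.Contains(base + kPageSize) is false.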

// PageMemory provides the backing of a single normal or large page.
class V8_EXPORT_PRIVATE PageMemory final {
 public:
  PageMemory(MemoryRegion overall, MemoryRegion writeable)
      : overall_(overall), writable_(writeable) {
    DCHECK(overall.Contains(writeable));
  }

  const MemoryRegion writeable_region() const { return writable_; }
  const MemoryRegion overall_region() const { return overall_; }

 private:
  MemoryRegion overall_;
  MemoryRegion writable_;
};

class V8_EXPORT_PRIVATE PageMemoryRegion {
 public:
  virtual ~PageMemoryRegion();

  const MemoryRegion reserved_region() const { return reserved_region_; }
  bool is_large() const { return is_large_; }

  // Lookup writeable base for an |address| that's contained in
  // PageMemoryRegion. Filters out addresses that are contained in non-writeable
  // regions (e.g. guard pages).
  inline Address Lookup(ConstAddress address) const;

  // Disallow copy/move.
  PageMemoryRegion(const PageMemoryRegion&) = delete;
  PageMemoryRegion& operator=(const PageMemoryRegion&) = delete;

  virtual void UnprotectForTesting() = 0;

 protected:
  PageMemoryRegion(PageAllocator*, MemoryRegion, bool);

  PageAllocator* const allocator_;
  const MemoryRegion reserved_region_;
  const bool is_large_;
};

// NormalPageMemoryRegion serves kNumPageRegions normal-sized PageMemory
// objects.
class V8_EXPORT_PRIVATE NormalPageMemoryRegion final : public PageMemoryRegion {
 public:
  static constexpr size_t kNumPageRegions = 10;

  explicit NormalPageMemoryRegion(PageAllocator*);
  ~NormalPageMemoryRegion() override;

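  // Per-index layout within the reserved region (as constructed below): each
  // slot spans kPageSize bytes, with a guard page of kGuardPageSize at both
  // ends and the writeable payload in between.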
  const PageMemory GetPageMemory(size_t index) const {
    DCHECK_LT(index, kNumPageRegions);
    return PageMemory(
        MemoryRegion(reserved_region().base() + kPageSize * index, kPageSize),
        MemoryRegion(
            reserved_region().base() + kPageSize * index + kGuardPageSize,
            kPageSize - 2 * kGuardPageSize));
  }

  // Allocates a normal page at |writeable_base| address. Changes page
  // protection.
  void Allocate(Address writeable_base);

  // Frees a normal page at |writeable_base| address. Changes page
  // protection.
  void Free(Address);

  inline Address Lookup(ConstAddress) const;

  void UnprotectForTesting() final;

 private:
  void ChangeUsed(size_t index, bool value) {
    DCHECK_LT(index, kNumPageRegions);
    DCHECK_EQ(value, !page_memories_in_use_[index]);
    page_memories_in_use_[index] = value;
  }

  size_t GetIndex(ConstAddress address) const {
    return static_cast<size_t>(address - reserved_region().base()) >>
           kPageSizeLog2;
  }

  std::array<bool, kNumPageRegions> page_memories_in_use_ = {};
};

// LargePageMemoryRegion serves a single large PageMemory object.
class V8_EXPORT_PRIVATE LargePageMemoryRegion final : public PageMemoryRegion {
 public:
  LargePageMemoryRegion(PageAllocator*, size_t);
  ~LargePageMemoryRegion() override;

  const PageMemory GetPageMemory() const {
    return PageMemory(
        MemoryRegion(reserved_region().base(), reserved_region().size()),
        MemoryRegion(reserved_region().base() + kGuardPageSize,
                     reserved_region().size() - 2 * kGuardPageSize));
  }

  inline Address Lookup(ConstAddress) const;

  void UnprotectForTesting() final;
};

// A PageMemoryRegionTree is a binary search tree of PageMemoryRegions sorted
// by reserved base addresses.
//
// The tree does not keep its elements alive but merely provides indexing
// capabilities.
class V8_EXPORT_PRIVATE PageMemoryRegionTree final {
 public:
  PageMemoryRegionTree();
  ~PageMemoryRegionTree();

  void Add(PageMemoryRegion*);
  void Remove(PageMemoryRegion*);

  inline PageMemoryRegion* Lookup(ConstAddress) const;

 private:
  std::map<ConstAddress, PageMemoryRegion*> set_;
};
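
// Example usage sketch (illustrative only, not part of the interface):
//
//   PageMemoryRegionTree tree;
//   tree.Add(pmr);                                 // pmr: PageMemoryRegion*.
//   PageMemoryRegion* hit = tree.Lookup(address);  // nullptr if not covered.
//   tree.Remove(pmr);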

// A pool of PageMemory objects represented by the writeable base addresses.
//
// The pool does not keep its elements alive but merely provides pooling
// capabilities.
class V8_EXPORT_PRIVATE NormalPageMemoryPool final {
 public:
  static constexpr size_t kNumPoolBuckets = 16;

  using Result = std::pair<NormalPageMemoryRegion*, Address>;

  NormalPageMemoryPool();
  ~NormalPageMemoryPool();

  void Add(size_t, NormalPageMemoryRegion*, Address);
  Result Take(size_t);

 private:
  std::vector<Result> pool_[kNumPoolBuckets];
};
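
// Example usage sketch (illustrative only; variable names are assumptions):
//
//   NormalPageMemoryPool pool;
//   pool.Add(bucket, region, writeable_base);  // bucket < kNumPoolBuckets.
//   NormalPageMemoryPool::Result entry = pool.Take(bucket);
//   // entry.first/entry.second hold the pooled region and its writeable base.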

// A backend that is used for allocating and freeing normal and large pages.
//
// Internally maintains a set of PageMemoryRegions. The backend keeps its used
// regions alive.
class V8_EXPORT_PRIVATE PageBackend final {
 public:
  explicit PageBackend(PageAllocator*);
  ~PageBackend();

  // Allocates a normal page from the backend.
  //
  // Returns the writeable base of the region.
  Address AllocateNormalPageMemory(size_t);

  // Returns normal page memory back to the backend. Expects the
  // |writeable_base| returned by |AllocateNormalPageMemory()|.
  void FreeNormalPageMemory(size_t, Address writeable_base);

  // Allocates a large page from the backend.
  //
  // Returns the writeable base of the region.
  Address AllocateLargePageMemory(size_t size);

  // Returns large page memory back to the backend. Expects the |writeable_base|
  // returned by |AllocateLargePageMemory()|.
  void FreeLargePageMemory(Address writeable_base);

  // Returns the writeable base if |address| is contained in a valid page
  // memory.
  inline Address Lookup(ConstAddress) const;

  // Disallow copy/move.
  PageBackend(const PageBackend&) = delete;
  PageBackend& operator=(const PageBackend&) = delete;

 private:
  PageAllocator* allocator_;
  NormalPageMemoryPool page_pool_;
  PageMemoryRegionTree page_memory_region_tree_;
  std::vector<std::unique_ptr<PageMemoryRegion>> normal_page_memory_regions_;
  std::unordered_map<PageMemoryRegion*, std::unique_ptr<PageMemoryRegion>>
      large_page_memory_regions_;
};
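
// Example usage sketch (illustrative only; |allocator| and |bucket| are
// assumptions, with the size_t parameter taken to be the pool bucket/size
// selector):
//
//   PageBackend backend(&allocator);
//   Address writeable_base = backend.AllocateNormalPageMemory(bucket);
//   DCHECK_EQ(writeable_base, backend.Lookup(writeable_base));
//   backend.FreeNormalPageMemory(bucket, writeable_base);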

// Returns true if the provided allocator supports committing at the required
// granularity.
inline bool SupportsCommittingGuardPages(PageAllocator* allocator) {
  return kGuardPageSize % allocator->CommitPageSize() == 0;
}
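// For example (illustrative only): with a commit page size of 4 KiB, the check
// holds whenever kGuardPageSize is a multiple of 4 KiB, e.g. 4096 % 4096 == 0.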

Address NormalPageMemoryRegion::Lookup(ConstAddress address) const {
  size_t index = GetIndex(address);
  if (!page_memories_in_use_[index]) return nullptr;
  const MemoryRegion writeable_region = GetPageMemory(index).writeable_region();
  return writeable_region.Contains(address) ? writeable_region.base() : nullptr;
}

Address LargePageMemoryRegion::Lookup(ConstAddress address) const {
  const MemoryRegion writeable_region = GetPageMemory().writeable_region();
  return writeable_region.Contains(address) ? writeable_region.base() : nullptr;
}

Address PageMemoryRegion::Lookup(ConstAddress address) const {
  DCHECK(reserved_region().Contains(address));
  return is_large()
             ? static_cast<const LargePageMemoryRegion*>(this)->Lookup(address)
             : static_cast<const NormalPageMemoryRegion*>(this)->Lookup(
                   address);
}

PageMemoryRegion* PageMemoryRegionTree::Lookup(ConstAddress address) const {
  auto it = set_.upper_bound(address);
  // This check also covers set_.size() > 0, since for an empty container it is
  // guaranteed that begin() == end().
  if (it == set_.begin()) return nullptr;
  auto* result = std::next(it, -1)->second;
  if (address < result->reserved_region().end()) return result;
  return nullptr;
}

Address PageBackend::Lookup(ConstAddress address) const {
  PageMemoryRegion* pmr = page_memory_region_tree_.Lookup(address);
  return pmr ? pmr->Lookup(address) : nullptr;
}

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_PAGE_MEMORY_H_