// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_READ_ONLY_SPACES_H_
#define V8_HEAP_READ_ONLY_SPACES_H_

#include <memory>
#include <utility>

#include "include/v8-platform.h"
#include "src/base/macros.h"
#include "src/common/globals.h"
#include "src/heap/allocation-stats.h"
#include "src/heap/base-space.h"
#include "src/heap/basic-memory-chunk.h"
#include "src/heap/list.h"
#include "src/heap/memory-chunk.h"

namespace v8 {
namespace internal {

class MemoryAllocator;
class ReadOnlyHeap;
class SnapshotData;

class ReadOnlyPage : public BasicMemoryChunk {
 public:
  ReadOnlyPage(Heap* heap, BaseSpace* space, size_t chunk_size,
               Address area_start, Address area_end, VirtualMemory reservation);

  // Clears any pointers in the header that point out of the page that would
  // otherwise make the header non-relocatable.
  void MakeHeaderRelocatable();

  size_t ShrinkToHighWaterMark();

  // Returns the address for a given offset in this page.
  Address OffsetToAddress(size_t offset) const {
    Address address_in_page = address() + offset;
    if (V8_SHARED_RO_HEAP_BOOL && COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) {
      // Pointer compression with a per-Isolate cage and shared ReadOnlyPages
      // means that the area_start and area_end cannot be defined since they
      // are stored within the pages which can be mapped at multiple memory
      // addresses.
      DCHECK_LT(offset, size());
    } else {
      DCHECK_GE(address_in_page, area_start());
      DCHECK_LT(address_in_page, area_end());
    }
    return address_in_page;
  }
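
  // Illustrative round-trip (a sketch, not part of the V8 API surface;
  // `page` and `addr` are hypothetical locals): an address within this
  // mapping of the page can be recovered from its offset, regardless of
  // which base address the page is currently mapped at:
  //
  //   size_t offset = addr - page->address();
  //   DCHECK_EQ(addr, page->OffsetToAddress(offset));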

  // Returns the start of the area of the page without using area_start(),
  // which cannot return the correct result when the page is remapped multiple
  // times.
  Address GetAreaStart() const {
    return address() +
           MemoryChunkLayout::ObjectStartOffsetInMemoryChunk(RO_SPACE);
  }
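
  // Informal note (inferred from the comment above, not guaranteed by this
  // header): since the object-area offset is a compile-time property of
  // MemoryChunkLayout, GetAreaStart() can be recomputed for any mapping of
  // the page. For a page mapped only once it should agree with the stored
  // value:
  //
  //   DCHECK_EQ(page->GetAreaStart(), page->area_start());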

 private:
  friend class ReadOnlySpace;
};

// -----------------------------------------------------------------------------
// Artifacts used to construct a new SharedReadOnlySpace
class ReadOnlyArtifacts {
 public:
  virtual ~ReadOnlyArtifacts() = default;

  // Initialize the ReadOnlyArtifacts from an Isolate that has just been
  // created either by serialization or by creating the objects directly.
  virtual void Initialize(Isolate* isolate, std::vector<ReadOnlyPage*>&& pages,
                          const AllocationStats& stats) = 0;

  // This replaces the ReadOnlySpace in the given Heap with a newly constructed
  // SharedReadOnlySpace that has pages created from the ReadOnlyArtifacts.
  // This is only called for the first Isolate, where the ReadOnlySpace is
  // created during the bootstrap process.
  virtual void ReinstallReadOnlySpace(Isolate* isolate) = 0;

  // Creates a ReadOnlyHeap for a specific Isolate. This will be populated
  // with a SharedReadOnlySpace object that points to the Isolate's heap.
  // Should only be used when the read-only heap memory is shared with or
  // without pointer compression. This is called for all subsequent Isolates
  // created after the first one.
  virtual ReadOnlyHeap* GetReadOnlyHeapForIsolate(Isolate* isolate) = 0;

  virtual void VerifyHeapAndSpaceRelationships(Isolate* isolate) = 0;

  std::vector<ReadOnlyPage*>& pages() { return pages_; }

  void set_accounting_stats(const AllocationStats& stats) { stats_ = stats; }
  const AllocationStats& accounting_stats() const { return stats_; }

  void set_shared_read_only_space(
      std::unique_ptr<SharedReadOnlySpace> shared_space) {
    shared_read_only_space_ = std::move(shared_space);
  }
  SharedReadOnlySpace* shared_read_only_space() {
    return shared_read_only_space_.get();
  }

  void set_read_only_heap(std::unique_ptr<ReadOnlyHeap> read_only_heap);
  ReadOnlyHeap* read_only_heap() const { return read_only_heap_.get(); }

  void InitializeChecksum(SnapshotData* read_only_snapshot_data);
  void VerifyChecksum(SnapshotData* read_only_snapshot_data,
                      bool read_only_heap_created);

 protected:
  ReadOnlyArtifacts() = default;

  std::vector<ReadOnlyPage*> pages_;
  AllocationStats stats_;
  std::unique_ptr<SharedReadOnlySpace> shared_read_only_space_;
  std::unique_ptr<ReadOnlyHeap> read_only_heap_;
#ifdef DEBUG
  // The checksum of the blob the read-only heap was deserialized from, if
  // any.
  base::Optional<uint32_t> read_only_blob_checksum_;
#endif  // DEBUG
};
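
// A hedged lifecycle sketch (illustrative only; `artifacts` and the Isolate
// and Heap pointers below are hypothetical locals, and the call order is
// inferred from the comments above rather than specified by this header).
// The first Isolate donates its pages to the artifacts and has its
// ReadOnlySpace reinstalled; every later Isolate only obtains a ReadOnlyHeap
// view:
//
//   auto artifacts = std::make_shared<SingleCopyReadOnlyArtifacts>();
//   // First Isolate, during bootstrap:
//   first_heap->read_only_space()->DetachPagesAndAddToArtifacts(artifacts);
//   artifacts->ReinstallReadOnlySpace(first_isolate);
//
//   // Any subsequent Isolate:
//   ReadOnlyHeap* ro_heap = artifacts->GetReadOnlyHeapForIsolate(isolate);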

// -----------------------------------------------------------------------------
// Artifacts used to construct a new SharedReadOnlySpace when pointer
// compression is disabled and so there is a single ReadOnlySpace with one set
// of pages shared between all Isolates.
class SingleCopyReadOnlyArtifacts : public ReadOnlyArtifacts {
 public:
  ~SingleCopyReadOnlyArtifacts() override;

  ReadOnlyHeap* GetReadOnlyHeapForIsolate(Isolate* isolate) override;
  void Initialize(Isolate* isolate, std::vector<ReadOnlyPage*>&& pages,
                  const AllocationStats& stats) override;
  void ReinstallReadOnlySpace(Isolate* isolate) override;
  void VerifyHeapAndSpaceRelationships(Isolate* isolate) override;

 private:
  v8::PageAllocator* page_allocator_ = nullptr;
};

// -----------------------------------------------------------------------------
// Artifacts used to construct a new SharedReadOnlySpace when pointer
// compression is enabled and so there is a ReadOnlySpace for each Isolate,
// each with its own set of pages mapped from the canonical set stored here.
class PointerCompressedReadOnlyArtifacts : public ReadOnlyArtifacts {
 public:
  ReadOnlyHeap* GetReadOnlyHeapForIsolate(Isolate* isolate) override;
  void Initialize(Isolate* isolate, std::vector<ReadOnlyPage*>&& pages,
                  const AllocationStats& stats) override;
  void ReinstallReadOnlySpace(Isolate* isolate) override;
  void VerifyHeapAndSpaceRelationships(Isolate* isolate) override;

 private:
  SharedReadOnlySpace* CreateReadOnlySpace(Isolate* isolate);
  Tagged_t OffsetForPage(size_t index) const { return page_offsets_[index]; }
  void InitializeRootsIn(Isolate* isolate);
  void InitializeRootsFrom(Isolate* isolate);

  std::unique_ptr<v8::PageAllocator::SharedMemoryMapping> RemapPageTo(
      size_t i, Address new_address, ReadOnlyPage*& new_page);

  static constexpr size_t kReadOnlyRootsCount =
      static_cast<size_t>(RootIndex::kReadOnlyRootsCount);

  Address read_only_roots_[kReadOnlyRootsCount];
  std::vector<Tagged_t> page_offsets_;
  std::vector<std::unique_ptr<PageAllocator::SharedMemory>> shared_memory_;
};
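
// Which ReadOnlyArtifacts implementation applies presumably follows the
// build configuration; a minimal selection sketch (this factory function is
// hypothetical, not declared anywhere under this name):
//
//   std::shared_ptr<ReadOnlyArtifacts> MakeArtifacts() {
//     if (COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) {
//       return std::make_shared<PointerCompressedReadOnlyArtifacts>();
//     }
//     return std::make_shared<SingleCopyReadOnlyArtifacts>();
//   }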

// -----------------------------------------------------------------------------
// Read Only space for all Immortal Immovable and Immutable objects
class ReadOnlySpace : public BaseSpace {
 public:
  V8_EXPORT_PRIVATE explicit ReadOnlySpace(Heap* heap);

  // Detach the pages and add them to artifacts for use in creating a
  // SharedReadOnlySpace. Since the current space no longer has any pages, it
  // should be replaced straight after this in its Heap.
  void DetachPagesAndAddToArtifacts(
      std::shared_ptr<ReadOnlyArtifacts> artifacts);

  V8_EXPORT_PRIVATE ~ReadOnlySpace() override;
  V8_EXPORT_PRIVATE virtual void TearDown(MemoryAllocator* memory_allocator);

  bool IsDetached() const { return heap_ == nullptr; }

  bool writable() const { return !is_marked_read_only_; }

  bool Contains(Address a) = delete;
  bool Contains(Object o) = delete;

  V8_EXPORT_PRIVATE
  AllocationResult AllocateRaw(int size_in_bytes,
                               AllocationAlignment alignment);
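
  // Hedged usage sketch (`space` and `size` are assumed locals, and the
  // exact AllocationAlignment enumerator name varies across V8 versions):
  //
  //   AllocationResult result = space->AllocateRaw(size, kWordAligned);
  //   HeapObject obj;
  //   if (result.To(&obj)) {
  //     // `obj` now lives in the read-only space; it must be fully
  //     // initialized before the space is sealed.
  //   }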

  V8_EXPORT_PRIVATE void ClearStringPaddingIfNeeded();

  enum class SealMode {
    kDetachFromHeap,
    kDetachFromHeapAndUnregisterMemory,
    kDoNotDetachFromHeap
  };

  // Seal the space by marking it read-only, optionally detaching it
  // from the heap and forgetting it for memory bookkeeping purposes (e.g.
  // to prevent the space's memory from registering as leaked).
  V8_EXPORT_PRIVATE void Seal(SealMode ro_mode);
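
  // Illustrative sealing sketch (the surrounding setup is assumed, not
  // defined here): once all read-only objects exist, the space is typically
  // sealed and, when its memory will be shared, detached from its Heap:
  //
  //   space->Seal(ReadOnlySpace::SealMode::kDetachFromHeap);
  //   DCHECK(space->IsDetached());
  //   DCHECK(!space->writable());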

  // During boot the free_space_map is created, and afterwards we may need
  // to write it into the free space nodes that were already created.
  void RepairFreeSpacesAfterDeserialization();

  size_t Size() const override { return accounting_stats_.Size(); }
  V8_EXPORT_PRIVATE size_t CommittedPhysicalMemory() const override;

  const std::vector<ReadOnlyPage*>& pages() const { return pages_; }
  Address top() const { return top_; }
  Address limit() const { return limit_; }
  size_t Capacity() const { return capacity_; }

  bool ContainsSlow(Address addr) const;
  V8_EXPORT_PRIVATE void ShrinkPages();
#ifdef VERIFY_HEAP
  void Verify(Isolate* isolate) const;
#ifdef DEBUG
  void VerifyCounters(Heap* heap) const;
#endif  // DEBUG
#endif  // VERIFY_HEAP

  // Return size of allocatable area on a page in this space.
  int AreaSize() const { return static_cast<int>(area_size_); }

  Address FirstPageAddress() const { return pages_.front()->address(); }

 protected:
  friend class SingleCopyReadOnlyArtifacts;

  void SetPermissionsForPages(MemoryAllocator* memory_allocator,
                              PageAllocator::Permission access);

  bool is_marked_read_only_ = false;

  // Accounting information for this space.
  AllocationStats accounting_stats_;

  std::vector<ReadOnlyPage*> pages_;

  Address top_;
  Address limit_;

 private:
  // Unseal the space after it has been sealed, by making it writable.
  void Unseal();

  void DetachFromHeap() { heap_ = nullptr; }

  AllocationResult AllocateRawUnaligned(int size_in_bytes);
  AllocationResult AllocateRawAligned(int size_in_bytes,
                                      AllocationAlignment alignment);

  HeapObject TryAllocateLinearlyAligned(int size_in_bytes,
                                        AllocationAlignment alignment);
  void EnsureSpaceForAllocation(int size_in_bytes);
  void FreeLinearAllocationArea();

  // String padding must be cleared just before serialization and therefore
  // the string padding in the space will already have been cleared if the
  // space was deserialized.
  bool is_string_padding_cleared_;

  size_t capacity_;
  const size_t area_size_;
};

class SharedReadOnlySpace : public ReadOnlySpace {
 public:
  explicit SharedReadOnlySpace(Heap* heap) : ReadOnlySpace(heap) {
    is_marked_read_only_ = true;
  }

  SharedReadOnlySpace(Heap* heap,
                      PointerCompressedReadOnlyArtifacts* artifacts);
  SharedReadOnlySpace(
      Heap* heap, std::vector<ReadOnlyPage*>&& new_pages,
      std::vector<std::unique_ptr<::v8::PageAllocator::SharedMemoryMapping>>&&
          mappings,
      AllocationStats&& new_stats);
  SharedReadOnlySpace(Heap* heap, SingleCopyReadOnlyArtifacts* artifacts);
  SharedReadOnlySpace(const SharedReadOnlySpace&) = delete;

  void TearDown(MemoryAllocator* memory_allocator) override;

  // Holds any shared memory mapping that must be freed when the space is
  // deallocated.
  std::vector<std::unique_ptr<v8::PageAllocator::SharedMemoryMapping>>
      shared_memory_mappings_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_READ_ONLY_SPACES_H_