// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_READ_ONLY_SPACES_H_
#define V8_HEAP_READ_ONLY_SPACES_H_

#include <memory>
#include <utility>

#include "include/v8-platform.h"
#include "src/base/macros.h"
#include "src/common/globals.h"
#include "src/heap/allocation-stats.h"
#include "src/heap/base-space.h"
#include "src/heap/basic-memory-chunk.h"
#include "src/heap/list.h"
#include "src/heap/memory-chunk.h"

namespace v8 {
namespace internal {

class MemoryAllocator;
class ReadOnlyHeap;
class SnapshotData;

class ReadOnlyPage : public BasicMemoryChunk {
 public:
  // Clears any pointers in the header that point outside the page and would
  // otherwise make the header non-relocatable.
  void MakeHeaderRelocatable();

  size_t ShrinkToHighWaterMark();

  // Returns the address for a given offset in this page.
  Address OffsetToAddress(size_t offset) const {
    Address address_in_page = address() + offset;
    if (V8_SHARED_RO_HEAP_BOOL && COMPRESS_POINTERS_BOOL) {
      // Pointer compression with shared ReadOnlyPages means that area_start
      // and area_end cannot be defined, since they are stored within the
      // pages, which can be mapped at multiple memory addresses.
      DCHECK_LT(offset, size());
    } else {
      DCHECK_GE(address_in_page, area_start());
      DCHECK_LT(address_in_page, area_end());
    }
    return address_in_page;
  }
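
  // A minimal usage sketch (an illustration, not part of the original header):
  // given a hypothetical |page| and an |offset| recorded relative to the page
  // base, the absolute address of an object is recovered as
  //
  //   Address addr = page->OffsetToAddress(offset);
  //
  // With a shared read-only heap and pointer compression, only
  // offset < size() is checked, because the same physical page may be mapped
  // at several virtual addresses, so area bounds are not meaningful there.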

  // Returns the area start of the page without using area_start(), which
  // cannot return the correct result when the page is remapped multiple times.
  Address GetAreaStart() const {
    return address() +
           MemoryChunkLayout::ObjectStartOffsetInMemoryChunk(RO_SPACE);
  }

 private:
  friend class ReadOnlySpace;
};

// -----------------------------------------------------------------------------
// Artifacts used to construct a new SharedReadOnlySpace.
class ReadOnlyArtifacts {
 public:
  virtual ~ReadOnlyArtifacts() = default;

  // Initialize the ReadOnlyArtifacts from an Isolate that has just been
  // created, either by deserializing a snapshot or by creating the objects
  // directly.
  virtual void Initialize(Isolate* isolate, std::vector<ReadOnlyPage*>&& pages,
                          const AllocationStats& stats) = 0;

  // This replaces the ReadOnlySpace in the given Heap with a newly constructed
  // SharedReadOnlySpace that has pages created from the ReadOnlyArtifacts.
  // This is only called for the first Isolate, where the ReadOnlySpace is
  // created during the bootstrap process.
  virtual void ReinstallReadOnlySpace(Isolate* isolate) = 0;

  // Creates a ReadOnlyHeap for a specific Isolate. This will be populated with
  // a SharedReadOnlySpace object that points to the Isolate's heap. Should
  // only be used when the read-only heap memory is shared, with or without
  // pointer compression. This is called for all subsequent Isolates created
  // after the first one.
  virtual ReadOnlyHeap* GetReadOnlyHeapForIsolate(Isolate* isolate) = 0;

  virtual void VerifyHeapAndSpaceRelationships(Isolate* isolate) = 0;

  std::vector<ReadOnlyPage*>& pages() { return pages_; }

  void set_accounting_stats(const AllocationStats& stats) { stats_ = stats; }
  const AllocationStats& accounting_stats() const { return stats_; }

  void set_shared_read_only_space(
      std::unique_ptr<SharedReadOnlySpace> shared_space) {
    shared_read_only_space_ = std::move(shared_space);
  }
  SharedReadOnlySpace* shared_read_only_space() {
    return shared_read_only_space_.get();
  }

  void set_read_only_heap(std::unique_ptr<ReadOnlyHeap> read_only_heap);
  ReadOnlyHeap* read_only_heap() const { return read_only_heap_.get(); }

  void InitializeChecksum(SnapshotData* read_only_snapshot_data);
  void VerifyChecksum(SnapshotData* read_only_snapshot_data,
                      bool read_only_heap_created);

 protected:
  ReadOnlyArtifacts() = default;

  std::vector<ReadOnlyPage*> pages_;
  AllocationStats stats_;
  std::unique_ptr<SharedReadOnlySpace> shared_read_only_space_;
  std::unique_ptr<ReadOnlyHeap> read_only_heap_;
#ifdef DEBUG
  // The checksum of the blob the read-only heap was deserialized from, if
  // any.
  base::Optional<uint32_t> read_only_blob_checksum_;
#endif  // DEBUG
};
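
// A hedged lifecycle sketch (not part of the original header): the first
// Isolate bootstraps the read-only heap, detaches its pages into a
// ReadOnlyArtifacts instance, and every later Isolate attaches to the shared
// copy. Assuming hypothetical |artifacts|, |first_isolate| and
// |second_isolate| values, the flow is roughly:
//
//   artifacts->Initialize(first_isolate, std::move(pages), stats);
//   artifacts->ReinstallReadOnlySpace(first_isolate);
//   ...
//   ReadOnlyHeap* ro_heap =
//       artifacts->GetReadOnlyHeapForIsolate(second_isolate);
//
// Which concrete subclass is used depends on pointer compression; see
// SingleCopyReadOnlyArtifacts and PointerCompressedReadOnlyArtifacts below.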

// -----------------------------------------------------------------------------
// Artifacts used to construct a new SharedReadOnlySpace when pointer
// compression is disabled and so there is a single ReadOnlySpace with one set
// of pages shared between all Isolates.
class SingleCopyReadOnlyArtifacts : public ReadOnlyArtifacts {
 public:
  ~SingleCopyReadOnlyArtifacts() override;

  ReadOnlyHeap* GetReadOnlyHeapForIsolate(Isolate* isolate) override;
  void Initialize(Isolate* isolate, std::vector<ReadOnlyPage*>&& pages,
                  const AllocationStats& stats) override;
  void ReinstallReadOnlySpace(Isolate* isolate) override;
  void VerifyHeapAndSpaceRelationships(Isolate* isolate) override;
};

// -----------------------------------------------------------------------------
// Artifacts used to construct a new SharedReadOnlySpace when pointer
// compression is enabled and so there is a ReadOnlySpace for each Isolate with
// its own set of pages mapped from the canonical set stored here.
class PointerCompressedReadOnlyArtifacts : public ReadOnlyArtifacts {
 public:
  ReadOnlyHeap* GetReadOnlyHeapForIsolate(Isolate* isolate) override;
  void Initialize(Isolate* isolate, std::vector<ReadOnlyPage*>&& pages,
                  const AllocationStats& stats) override;
  void ReinstallReadOnlySpace(Isolate* isolate) override;
  void VerifyHeapAndSpaceRelationships(Isolate* isolate) override;

 private:
  SharedReadOnlySpace* CreateReadOnlySpace(Isolate* isolate);
  Tagged_t OffsetForPage(size_t index) const { return page_offsets_[index]; }
  void InitializeRootsIn(Isolate* isolate);
  void InitializeRootsFrom(Isolate* isolate);

  std::unique_ptr<v8::PageAllocator::SharedMemoryMapping> RemapPageTo(
      size_t i, Address new_address, ReadOnlyPage*& new_page);

  static constexpr size_t kReadOnlyRootsCount =
      static_cast<size_t>(RootIndex::kReadOnlyRootsCount);

  Address read_only_roots_[kReadOnlyRootsCount];
  std::vector<Tagged_t> page_offsets_;
  std::vector<std::unique_ptr<PageAllocator::SharedMemory>> shared_memory_;
};
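
// A hedged sketch of the idea (an assumption, not from the original header):
// with pointer compression, every Isolate maps the same canonical read-only
// pages into its own pointer-compression cage, so a page's compressed
// (Tagged_t) offset from the cage base is identical across Isolates even
// though the absolute addresses differ. Conceptually, for a page index |i|:
//
//   Address page_base = isolate_root + page_offsets_[i];
//
// where isolate_root is the base address of that Isolate's cage.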

// -----------------------------------------------------------------------------
// Read Only space for all Immortal Immovable and Immutable objects.
class ReadOnlySpace : public BaseSpace {
 public:
  V8_EXPORT_PRIVATE explicit ReadOnlySpace(Heap* heap);

  // Detach the pages and add them to artifacts for use in creating a
  // SharedReadOnlySpace. Since the current space no longer has any pages, it
  // should be replaced straight after this in its Heap.
  void DetachPagesAndAddToArtifacts(
      std::shared_ptr<ReadOnlyArtifacts> artifacts);

  V8_EXPORT_PRIVATE ~ReadOnlySpace() override;
  V8_EXPORT_PRIVATE virtual void TearDown(MemoryAllocator* memory_allocator);

  bool IsDetached() const { return heap_ == nullptr; }

  bool writable() const { return !is_marked_read_only_; }

  bool Contains(Address a) = delete;
  bool Contains(Object o) = delete;

  V8_EXPORT_PRIVATE
  AllocationResult AllocateRaw(int size_in_bytes,
                               AllocationAlignment alignment);
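
  // A minimal usage sketch (an assumption, not part of the original header):
  // during bootstrap, read-only objects are bump-allocated into this space,
  // e.g. with a hypothetical |size_in_bytes|:
  //
  //   AllocationResult result =
  //       read_only_space->AllocateRaw(size_in_bytes, kWordAligned);
  //   HeapObject obj;
  //   if (result.To(&obj)) { /* initialize the object */ }
  //
  // Once the space has been sealed, no further allocation is possible.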

  V8_EXPORT_PRIVATE void ClearStringPaddingIfNeeded();

  enum class SealMode {
    kDetachFromHeap,
    kDetachFromHeapAndUnregisterMemory,
    kDoNotDetachFromHeap
  };

  // Seal the space by marking it read-only, optionally detaching it from the
  // heap and forgetting it for memory bookkeeping purposes (e.g. to prevent
  // the space's memory from registering as leaked).
  V8_EXPORT_PRIVATE void Seal(SealMode ro_mode);
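
  // A hedged usage sketch (not from the original header): after bootstrap the
  // space is typically sealed, and when the read-only heap is shared its
  // pages are also detached so they can live on in the ReadOnlyArtifacts:
  //
  //   read_only_space->Seal(SealMode::kDetachFromHeap);
  //
  // kDoNotDetachFromHeap keeps the space owned by its Heap, while
  // kDetachFromHeapAndUnregisterMemory additionally drops it from memory
  // bookkeeping.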

  // During boot the free_space_map is created, and afterwards we may need
  // to write it into the free space nodes that were already created.
  void RepairFreeSpacesAfterDeserialization();

  size_t Size() override { return accounting_stats_.Size(); }
  V8_EXPORT_PRIVATE size_t CommittedPhysicalMemory() override;

  const std::vector<ReadOnlyPage*>& pages() const { return pages_; }
  Address top() const { return top_; }
  Address limit() const { return limit_; }
  size_t Capacity() const { return capacity_; }

  bool ContainsSlow(Address addr);
  V8_EXPORT_PRIVATE void ShrinkPages();
#ifdef VERIFY_HEAP
  void Verify(Isolate* isolate);
#ifdef DEBUG
  void VerifyCounters(Heap* heap);
#endif  // DEBUG
#endif  // VERIFY_HEAP

  // Return size of allocatable area on a page in this space.
  int AreaSize() const { return static_cast<int>(area_size_); }

  ReadOnlyPage* InitializePage(BasicMemoryChunk* chunk);

  Address FirstPageAddress() const { return pages_.front()->address(); }

 protected:
  friend class SingleCopyReadOnlyArtifacts;

  void SetPermissionsForPages(MemoryAllocator* memory_allocator,
                              PageAllocator::Permission access);

  bool is_marked_read_only_ = false;

  // Accounting information for this space.
  AllocationStats accounting_stats_;

  std::vector<ReadOnlyPage*> pages_;

  Address top_;
  Address limit_;

 private:
  // Unseal the space after it has been sealed, by making it writable.
  void Unseal();

  void DetachFromHeap() { heap_ = nullptr; }

  AllocationResult AllocateRawUnaligned(int size_in_bytes);
  AllocationResult AllocateRawAligned(int size_in_bytes,
                                      AllocationAlignment alignment);

  HeapObject TryAllocateLinearlyAligned(int size_in_bytes,
                                        AllocationAlignment alignment);
  void EnsureSpaceForAllocation(int size_in_bytes);
  void FreeLinearAllocationArea();

  // String padding must be cleared just before serialization and therefore
  // the string padding in the space will already have been cleared if the
  // space was deserialized.
  bool is_string_padding_cleared_;

  size_t capacity_;
  const size_t area_size_;
};

class SharedReadOnlySpace : public ReadOnlySpace {
 public:
  explicit SharedReadOnlySpace(Heap* heap) : ReadOnlySpace(heap) {
    is_marked_read_only_ = true;
  }

  SharedReadOnlySpace(Heap* heap,
                      PointerCompressedReadOnlyArtifacts* artifacts);
  SharedReadOnlySpace(
      Heap* heap, std::vector<ReadOnlyPage*>&& new_pages,
      std::vector<std::unique_ptr<::v8::PageAllocator::SharedMemoryMapping>>&&
          mappings,
      AllocationStats&& new_stats);
  SharedReadOnlySpace(Heap* heap, SingleCopyReadOnlyArtifacts* artifacts);
  SharedReadOnlySpace(const SharedReadOnlySpace&) = delete;

  void TearDown(MemoryAllocator* memory_allocator) override;

  // Holds any shared memory mapping that must be freed when the space is
  // deallocated.
  std::vector<std::unique_ptr<v8::PageAllocator::SharedMemoryMapping>>
      shared_memory_mappings_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_READ_ONLY_SPACES_H_