// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/cppgc/object-allocator.h"

#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/free-list.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap-space.h"
#include "src/heap/cppgc/heap-visitor.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/object-start-bitmap.h"
#include "src/heap/cppgc/page-memory.h"
#include "src/heap/cppgc/stats-collector.h"
#include "src/heap/cppgc/sweeper.h"

namespace cppgc {
namespace internal {
namespace {

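// Marks the age-table entries covering [begin, end) as young. Boundary
// entries that are only partially covered by the range are reset to kUnknown
// below. Compiles to a no-op unless CPPGC_YOUNG_GENERATION is defined.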
void MarkRangeAsYoung(BasePage* page, Address begin, Address end) {
#if defined(CPPGC_YOUNG_GENERATION)
  DCHECK_LT(begin, end);

  static constexpr auto kEntrySize = AgeTable::kEntrySizeInBytes;

  const uintptr_t offset_begin = CagedHeap::OffsetFromAddress(begin);
  const uintptr_t offset_end = CagedHeap::OffsetFromAddress(end);

  const uintptr_t young_offset_begin = (begin == page->PayloadStart())
                                           ? RoundDown(offset_begin, kEntrySize)
                                           : RoundUp(offset_begin, kEntrySize);
  const uintptr_t young_offset_end = (end == page->PayloadEnd())
                                         ? RoundUp(offset_end, kEntrySize)
                                         : RoundDown(offset_end, kEntrySize);

  auto& age_table = page->heap()->caged_heap().local_data().age_table;
  for (auto offset = young_offset_begin; offset < young_offset_end;
       offset += AgeTable::kEntrySizeInBytes) {
    age_table[offset] = AgeTable::Age::kYoung;
  }

  // Set to kUnknown the first and the last regions of the newly allocated
  // linear buffer.
  if (begin != page->PayloadStart() && !IsAligned(offset_begin, kEntrySize))
    age_table[offset_begin] = AgeTable::Age::kUnknown;
  if (end != page->PayloadEnd() && !IsAligned(offset_end, kEntrySize))
    age_table[offset_end] = AgeTable::Age::kUnknown;
#endif
}

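// Returns the block [start, start + size) to the space's free list and
// records the block start in the page's object-start bitmap.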
void AddToFreeList(NormalPageSpace* space, Address start, size_t size) {
  auto& free_list = space->free_list();
  free_list.Add({start, size});
  NormalPage::From(BasePage::FromPayload(start))
      ->object_start_bitmap()
      .SetBit(start);
}

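// Retires the remainder of the current linear allocation buffer (LAB) to the
// free list and installs [new_buffer, new_buffer + new_size) as the new LAB.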
void ReplaceLinearAllocationBuffer(NormalPageSpace* space,
                                   StatsCollector* stats_collector,
                                   Address new_buffer, size_t new_size) {
  DCHECK_NOT_NULL(space);
  DCHECK_NOT_NULL(stats_collector);

  auto& lab = space->linear_allocation_buffer();
  if (lab.size()) {
    AddToFreeList(space, lab.start(), lab.size());
    stats_collector->NotifyExplicitFree(lab.size());
  }

  lab.Set(new_buffer, new_size);
  if (new_size) {
    DCHECK_NOT_NULL(new_buffer);
    stats_collector->NotifyAllocation(new_size);
    auto* page = NormalPage::From(BasePage::FromPayload(new_buffer));
    page->object_start_bitmap().ClearBit(new_buffer);
    MarkRangeAsYoung(page, new_buffer, new_buffer + new_size);
  }
}

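// Creates a dedicated LargePage for an object of `size` bytes and returns a
// pointer to the object's payload.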
void* AllocateLargeObject(PageBackend* page_backend, LargePageSpace* space,
                          StatsCollector* stats_collector, size_t size,
                          GCInfoIndex gcinfo) {
  LargePage* page = LargePage::Create(page_backend, space, size);
  space->AddPage(page);

  auto* header = new (page->ObjectHeader())
      HeapObjectHeader(HeapObjectHeader::kLargeObjectSizeInHeader, gcinfo);

  stats_collector->NotifyAllocation(size);
  MarkRangeAsYoung(page, page->PayloadStart(), page->PayloadEnd());

  return header->Payload();
}

}  // namespace

ObjectAllocator::ObjectAllocator(RawHeap* heap, PageBackend* page_backend,
                                 StatsCollector* stats_collector)
    : raw_heap_(heap),
      page_backend_(page_backend),
      stats_collector_(stats_collector) {}

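// Out-of-line allocation path: delegates to OutOfLineAllocateImpl() and then
// reports a safepoint so that an incremental garbage collection can make
// progress on allocation.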
void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace* space, size_t size,
                                         GCInfoIndex gcinfo) {
  void* memory = OutOfLineAllocateImpl(space, size, gcinfo);
  stats_collector_->NotifySafePointForConservativeCollection();
  raw_heap_->heap()->AdvanceIncrementalGarbageCollectionOnAllocationIfNeeded();
  return memory;
}

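// Handles allocations that do not fit the current linear allocation buffer,
// following the numbered steps below.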
void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
                                             size_t size, GCInfoIndex gcinfo) {
  DCHECK_EQ(0, size & kAllocationMask);
  DCHECK_LE(kFreeListEntrySize, size);

  // 1. If this allocation is big enough, allocate a large object.
  if (size >= kLargeObjectSizeThreshold) {
    auto* large_space = LargePageSpace::From(
        raw_heap_->Space(RawHeap::RegularSpaceType::kLarge));
    return AllocateLargeObject(page_backend_, large_space, stats_collector_,
                               size, gcinfo);
  }

  // 2. Try to allocate from the freelist.
  if (void* result = AllocateFromFreeList(space, size, gcinfo)) {
    return result;
  }

  // 3. Lazily sweep pages of this heap until we find a freed area for
  // this allocation or we finish sweeping all pages of this heap.
  // TODO(chromium:1056170): Add lazy sweep.

  // 4. Complete sweeping.
  raw_heap_->heap()->sweeper().FinishIfRunning();

  // 5. Add a new page to this heap.
  auto* new_page = NormalPage::Create(page_backend_, space);
  space->AddPage(new_page);

  // 6. Set linear allocation buffer to new page.
  ReplaceLinearAllocationBuffer(space, stats_collector_,
                                new_page->PayloadStart(),
                                new_page->PayloadSize());

  // 7. Allocate from it. The allocation must succeed.
  void* result = AllocateObjectOnSpace(space, size, gcinfo);
  CHECK(result);

  return result;
}

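// Tries to service the allocation from the free list. On success, the free
// block becomes the new linear allocation buffer and the object is allocated
// from it.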
void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace* space, size_t size,
                                            GCInfoIndex gcinfo) {
  const FreeList::Block entry = space->free_list().Allocate(size);
  if (!entry.address) return nullptr;

  ReplaceLinearAllocationBuffer(
      space, stats_collector_, static_cast<Address>(entry.address), entry.size);

  return AllocateObjectOnSpace(space, size, gcinfo);
}

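// Returns the outstanding linear allocation buffers of all normal page spaces
// to their free lists.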
void ObjectAllocator::ResetLinearAllocationBuffers() {
  class Resetter : public HeapVisitor<Resetter> {
   public:
    explicit Resetter(StatsCollector* stats) : stats_collector_(stats) {}

    bool VisitLargePageSpace(LargePageSpace*) { return true; }

    bool VisitNormalPageSpace(NormalPageSpace* space) {
      ReplaceLinearAllocationBuffer(space, stats_collector_, nullptr, 0);
      return true;
    }

   private:
    StatsCollector* stats_collector_;
  } visitor(stats_collector_);

  visitor.Traverse(raw_heap_);
}

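// RAII helper that tracks nesting of scopes in which allocation is not
// allowed, by incrementing and decrementing a counter on the allocator.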
ObjectAllocator::NoAllocationScope::NoAllocationScope(
    ObjectAllocator& allocator)
    : allocator_(allocator) {
  allocator.no_allocation_scope_++;
}

ObjectAllocator::NoAllocationScope::~NoAllocationScope() {
  allocator_.no_allocation_scope_--;
}

}  // namespace internal
}  // namespace cppgc