// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_PAGED_SPACES_INL_H_
#define V8_HEAP_PAGED_SPACES_INL_H_

#include "src/common/globals.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/paged-spaces.h"
#include "src/objects/code-inl.h"
#include "src/objects/heap-object.h"
#include "src/objects/objects-inl.h"

namespace v8 {
namespace internal {

// -----------------------------------------------------------------------------
// PagedSpaceObjectIterator

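// Returns the next live object, advancing to subsequent pages as needed, or a
// null HeapObject once the iterator is exhausted.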
HeapObject PagedSpaceObjectIterator::Next() {
  do {
    HeapObject next_obj = FromCurrentPage();
    if (!next_obj.is_null()) return next_obj;
  } while (AdvanceToNextPage());
  return HeapObject();
}

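// Scans forward from cur_addr_ on the current page, skipping free-space and
// filler objects, and returns the first live object found, or a null
// HeapObject if the page has been exhausted.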
HeapObject PagedSpaceObjectIterator::FromCurrentPage() {
  while (cur_addr_ != cur_end_) {
    HeapObject obj = HeapObject::FromAddress(cur_addr_);
    const int obj_size = obj.Size();
    cur_addr_ += obj_size;
    DCHECK_LE(cur_addr_, cur_end_);
    if (!obj.IsFreeSpaceOrFiller()) {
      if (obj.IsCode()) {
        DCHECK_EQ(space_->identity(), CODE_SPACE);
        DCHECK_CODEOBJECT_SIZE(obj_size, space_);
      } else {
        DCHECK_OBJECT_SIZE(obj_size);
      }
      return obj;
    }
  }
  return HeapObject();
}

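// Returns true if the page containing |addr| is owned by this space. With a
// third-party heap the ownership check is skipped.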
bool PagedSpace::Contains(Address addr) const {
  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
    return true;
  }
  return Page::FromAddress(addr)->owner() == this;
}

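// Returns true if |o| is a heap object located on a page owned by this space.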
bool PagedSpace::Contains(Object o) const {
  if (!o.IsHeapObject()) return false;
  return Page::FromAddress(o.ptr())->owner() == this;
}

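// Removes all of the page's free-list categories from this space's free list.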
void PagedSpace::UnlinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  page->ForAllFreeListCategories([this](FreeListCategory* category) {
    free_list()->RemoveCategory(category);
  });
}

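// Re-adds the page's free-list categories to this space's free list and
// returns the number of bytes that became available for allocation again.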
size_t PagedSpace::RelinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  size_t added = 0;
  page->ForAllFreeListCategories([this, &added](FreeListCategory* category) {
    added += category->available();
    category->Relink(free_list());
  });

  DCHECK_IMPLIES(!page->IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE),
                 page->AvailableInFreeList() ==
                     page->AvailableInFreeListFromAllocatedBytes());
  return added;
}

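// If |object| was the most recent allocation in the linear allocation area,
// undoes it by moving the top pointer back to the object's address. Returns
// whether the memory was reclaimed.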
bool PagedSpace::TryFreeLast(HeapObject object, int object_size) {
  if (allocation_info_.top() != kNullAddress) {
    const Address object_address = object.address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

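// Ensures the main linear allocation buffer can hold |size_in_bytes|,
// refilling it if necessary. Returns false if no memory could be obtained.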
bool PagedSpace::EnsureLabMain(int size_in_bytes, AllocationOrigin origin) {
  if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit()) {
    return true;
  }
  return RefillLabMain(size_in_bytes, origin);
}

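// Fast-path bump-pointer allocation from the linear allocation area. Returns
// a retry result if the area does not have |size_in_bytes| left.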
AllocationResult PagedSpace::AllocateFastUnaligned(int size_in_bytes) {
  Address current_top = allocation_info_.top();
  Address new_top = current_top + size_in_bytes;
  if (new_top > allocation_info_.limit())
    return AllocationResult::Retry(identity());
  DCHECK_LE(new_top, allocation_info_.limit());
  allocation_info_.set_top(new_top);

  return AllocationResult(HeapObject::FromAddress(current_top));
}

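// Fast-path allocation that places a filler object in front of the allocation
// when needed to satisfy |alignment|. The total consumed size (filler plus
// object) is reported via |aligned_size_in_bytes| when a pointer is provided.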
AllocationResult PagedSpace::AllocateFastAligned(
    int size_in_bytes, int* aligned_size_in_bytes,
    AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + size_in_bytes;
  if (new_top > allocation_info_.limit())
    return AllocationResult::Retry(identity());

  allocation_info_.set_top(new_top);
  if (aligned_size_in_bytes)
    *aligned_size_in_bytes = filler_size + size_in_bytes;
  if (filler_size > 0) {
    Heap::PrecedeWithFiller(ReadOnlyRoots(heap()),
                            HeapObject::FromAddress(current_top), filler_size);
  }

  return AllocationResult(HeapObject::FromAddress(current_top + filler_size));
}

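// Allocates |size_in_bytes| without alignment constraints, refilling the
// linear allocation buffer first if required, then reports the new memory to
// MSAN and to allocation observers.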
AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes,
                                                  AllocationOrigin origin) {
  if (!EnsureLabMain(size_in_bytes, origin)) {
    return AllocationResult::Retry(identity());
  }

  AllocationResult result = AllocateFastUnaligned(size_in_bytes);
  DCHECK(!result.IsRetry());
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(result.ToObjectChecked().address(),
                                      size_in_bytes);

  if (FLAG_trace_allocations_origins) {
    UpdateAllocationOrigins(origin);
  }

  InvokeAllocationObservers(result.ToAddress(), size_in_bytes, size_in_bytes,
                            size_in_bytes);

  return result;
}

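// Allocates |size_in_bytes| with the requested alignment. The worst-case
// filler size is reserved up front because the exact filler is only known
// once the allocation address is fixed. Only used for the old space.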
AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                AllocationAlignment alignment,
                                                AllocationOrigin origin) {
  DCHECK_EQ(identity(), OLD_SPACE);
  int allocation_size = size_in_bytes;
  // We don't know exactly how much filler we need to align until space is
  // allocated, so assume the worst case.
  int filler_size = Heap::GetMaximumFillToAlign(alignment);
  allocation_size += filler_size;
  if (!EnsureLabMain(allocation_size, origin)) {
    return AllocationResult::Retry(identity());
  }
  int aligned_size_in_bytes;
  AllocationResult result =
      AllocateFastAligned(size_in_bytes, &aligned_size_in_bytes, alignment);
  DCHECK(!result.IsRetry());
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(result.ToObjectChecked().address(),
                                      size_in_bytes);

  if (FLAG_trace_allocations_origins) {
    UpdateAllocationOrigins(origin);
  }

  InvokeAllocationObservers(result.ToAddress(), size_in_bytes,
                            aligned_size_in_bytes, allocation_size);

  return result;
}

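// Main allocation entry point: tries the fast path for the requested
// alignment and falls back to AllocateRawSlow() when the linear allocation
// area is exhausted.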
AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
                                         AllocationAlignment alignment,
                                         AllocationOrigin origin) {
  AllocationResult result;

  if (alignment != kWordAligned) {
    result = AllocateFastAligned(size_in_bytes, nullptr, alignment);
  } else {
    result = AllocateFastUnaligned(size_in_bytes);
  }

  if (!result.IsRetry()) {
    return result;
  } else {
    return AllocateRawSlow(size_in_bytes, alignment, origin);
  }
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_PAGED_SPACES_INL_H_