// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SPACES_INL_H_
#define V8_HEAP_SPACES_INL_H_

#include "src/base/atomic-utils.h"
#include "src/base/v8-fallthrough.h"
#include "src/common/globals.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/large-spaces.h"
#include "src/heap/memory-chunk-inl.h"
#include "src/heap/new-spaces.h"
#include "src/heap/paged-spaces.h"
#include "src/heap/spaces.h"

namespace v8 {
namespace internal {

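// Pre-increment: advances this page iterator to the next page in the owning
// space's page list.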
template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
  p_ = p_->next_page();
  return *this;
}

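// Post-increment: advances the iterator but returns its previous position.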
template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
  PageIteratorImpl<PAGE_TYPE> tmp(*this);
  operator++();
  return tmp;
}

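// A PageRange covers the pages from |start| up to and including the page
// that contains the allocation area ending at |limit|.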
PageRange::PageRange(Address start, Address limit)
    : begin_(Page::FromAddress(start)),
      end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
#ifdef DEBUG
  if (begin_->InNewSpace()) {
    SemiSpace::AssertValidRange(start, limit);
  }
#endif  // DEBUG
}

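// Accounts |amount| additional external (off-heap) backing store bytes
// against this space and the owning heap.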
void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  heap()->IncrementExternalBackingStoreBytes(type, amount);
}

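// Removes |amount| external backing store bytes from this space and the
// owning heap.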
void Space::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  heap()->DecrementExternalBackingStoreBytes(type, amount);
}

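// Transfers external backing store accounting from |from| to |to|. The
// heap-wide totals are unchanged, so nothing is done when both are the same.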
void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                          Space* from, Space* to,
                                          size_t amount) {
  if (from == to) return;

  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
}

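// Test-only helper: forbids any further allocation on this page, excludes it
// from evacuation, and evicts its entries from the owner's free list.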
void Page::MarkNeverAllocateForTesting() {
  DCHECK(this->owner_identity() != NEW_SPACE);
  DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
  SetFlag(NEVER_ALLOCATE_ON_PAGE);
  SetFlag(NEVER_EVACUATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

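// Marks the page as an evacuation candidate for compaction and evicts its
// free-list entries from the owning paged space.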
void Page::MarkEvacuationCandidate() {
  DCHECK(!IsFlagSet(NEVER_EVACUATE));
  DCHECK_NULL(slot_set<OLD_TO_OLD>());
  DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  SetFlag(EVACUATION_CANDIDATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

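// Clears the evacuation-candidate flag and re-initializes the page's
// free-list categories so it can serve allocations again.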
void Page::ClearEvacuationCandidate() {
  if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
    DCHECK_NULL(slot_set<OLD_TO_OLD>());
    DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  }
  ClearFlag(EVACUATION_CANDIDATE);
  InitializeFreeListCategories();
}

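// Iterates over all old-generation memory chunks: old space, map space (if
// present), code space, and the regular and code large-object spaces.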
OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
    : heap_(heap),
      state_(kOldSpaceState),
      old_iterator_(heap->old_space()->begin()),
      code_iterator_(heap->code_space()->begin()),
      map_iterator_(heap->map_space() ? heap->map_space()->begin()
                                      : PageRange::iterator(nullptr)),
      lo_iterator_(heap->lo_space()->begin()),
      code_lo_iterator_(heap->code_lo_space()->begin()) {}

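// Returns the next old-generation chunk, falling through the states in order
// as each space is exhausted; returns nullptr once all chunks are visited.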
MemoryChunk* OldGenerationMemoryChunkIterator::next() {
  switch (state_) {
    case kOldSpaceState: {
      if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
      state_ = kMapState;
      V8_FALLTHROUGH;
    }
    case kMapState: {
      if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++);
      state_ = kCodeState;
      V8_FALLTHROUGH;
    }
    case kCodeState: {
      if (code_iterator_ != heap_->code_space()->end())
        return *(code_iterator_++);
      state_ = kLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kLargeObjectState: {
      if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
      state_ = kCodeLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kCodeLargeObjectState: {
      if (code_lo_iterator_ != heap_->code_lo_space()->end())
        return *(code_lo_iterator_++);
      state_ = kFinishedState;
      V8_FALLTHROUGH;
    }
    case kFinishedState:
      return nullptr;
    default:
      break;
  }
  UNREACHABLE();
}

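// Bump-pointer allocation within the buffer, preceding the object with a
// filler when |alignment| requires it; fails if the buffer is too small.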
AllocationResult LocalAllocationBuffer::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);
  int aligned_size = filler_size + size_in_bytes;
  if (!allocation_info_.CanIncrementTop(aligned_size)) {
    return AllocationResult::Failure();
  }
  HeapObject object =
      HeapObject::FromAddress(allocation_info_.IncrementTop(aligned_size));
  return filler_size > 0 ? AllocationResult::FromObject(
                               heap_->PrecedeWithFiller(object, filler_size))
                         : AllocationResult::FromObject(object);
}

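// Wraps a successful allocation of |size| bytes into a fresh
// LocalAllocationBuffer; a failed result yields an invalid buffer.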
LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                        AllocationResult result,
                                                        intptr_t size) {
  if (result.IsFailure()) return InvalidBuffer();
  HeapObject obj;
  bool ok = result.To(&obj);
  USE(ok);
  DCHECK(ok);
  Address top = HeapObject::cast(obj).address();
  return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}

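// Merges |other| into this buffer if the two linear areas are adjacent.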
bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
  return allocation_info_.MergeIfAdjacent(other->allocation_info_);
}

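// Undoes the most recent allocation by lowering the top pointer, provided
// |object| was the last object allocated from this buffer.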
bool LocalAllocationBuffer::TryFreeLast(HeapObject object, int object_size) {
  if (IsValid()) {
    const Address object_address = object.address();
    return allocation_info_.DecrementTopIfAdjacent(object_address, object_size);
  }
  return false;
}

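// Returns true while there are chunks left, advancing to the first page of
// the next non-empty space once the current space is exhausted.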
bool MemoryChunkIterator::HasNext() {
  if (current_chunk_) return true;

  while (space_iterator_.HasNext()) {
    Space* space = space_iterator_.Next();
    current_chunk_ = space->first_page();
    if (current_chunk_) return true;
  }

  return false;
}

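// Returns the current chunk and steps to its successor in the chunk list.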
MemoryChunk* MemoryChunkIterator::Next() {
  MemoryChunk* chunk = current_chunk_;
  current_chunk_ = chunk->list_node().next();
  return chunk;
}

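// Fast-path bump-pointer allocation without alignment handling; fails if the
// linear allocation area cannot accommodate |size_in_bytes|.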
AllocationResult SpaceWithLinearArea::AllocateFastUnaligned(
    int size_in_bytes, AllocationOrigin origin) {
  if (!allocation_info_->CanIncrementTop(size_in_bytes)) {
    return AllocationResult::Failure();
  }
  HeapObject obj =
      HeapObject::FromAddress(allocation_info_->IncrementTop(size_in_bytes));

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);

  if (FLAG_trace_allocations_origins) {
    UpdateAllocationOrigins(origin);
  }

  return AllocationResult::FromObject(obj);
}

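// Fast-path allocation that honors |alignment| by prepending a filler object
// when needed; reports the filler-inclusive size through
// |result_aligned_size_in_bytes| if requested.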
AllocationResult SpaceWithLinearArea::AllocateFastAligned(
    int size_in_bytes, int* result_aligned_size_in_bytes,
    AllocationAlignment alignment, AllocationOrigin origin) {
  Address top = allocation_info_->top();
  int filler_size = Heap::GetFillToAlign(top, alignment);
  int aligned_size_in_bytes = size_in_bytes + filler_size;

  if (!allocation_info_->CanIncrementTop(aligned_size_in_bytes)) {
    return AllocationResult::Failure();
  }
  HeapObject obj = HeapObject::FromAddress(
      allocation_info_->IncrementTop(aligned_size_in_bytes));
  if (result_aligned_size_in_bytes)
    *result_aligned_size_in_bytes = aligned_size_in_bytes;

  if (filler_size > 0) {
    obj = heap()->PrecedeWithFiller(obj, filler_size);
  }

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes);

  if (FLAG_trace_allocations_origins) {
    UpdateAllocationOrigins(origin);
  }

  return AllocationResult::FromObject(obj);
}

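// Main allocation entry point: attempts the fast (aligned or unaligned) path
// first and falls back to AllocateRawSlow on failure.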
AllocationResult SpaceWithLinearArea::AllocateRaw(int size_in_bytes,
                                                  AllocationAlignment alignment,
                                                  AllocationOrigin origin) {
  DCHECK(!FLAG_enable_third_party_heap);

  AllocationResult result;

  if (USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned) {
    result = AllocateFastAligned(size_in_bytes, nullptr, alignment, origin);
  } else {
    result = AllocateFastUnaligned(size_in_bytes, origin);
  }

  return result.IsFailure() ? AllocateRawSlow(size_in_bytes, alignment, origin)
                            : result;
}

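// Slow-path unaligned allocation: ensures the linear allocation area can fit
// the object, performs the fast-path bump, and notifies allocation observers.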
AllocationResult SpaceWithLinearArea::AllocateRawUnaligned(
    int size_in_bytes, AllocationOrigin origin) {
  DCHECK(!FLAG_enable_third_party_heap);
  int max_aligned_size;
  if (!EnsureAllocation(size_in_bytes, kTaggedAligned, origin,
                        &max_aligned_size)) {
    return AllocationResult::Failure();
  }

  DCHECK_EQ(max_aligned_size, size_in_bytes);
  DCHECK_LE(allocation_info_->start(), allocation_info_->top());

  AllocationResult result = AllocateFastUnaligned(size_in_bytes, origin);
  DCHECK(!result.IsFailure());

  InvokeAllocationObservers(result.ToAddress(), size_in_bytes, size_in_bytes,
                            size_in_bytes);

  return result;
}

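// Slow-path aligned allocation: ensures room for the worst-case aligned size,
// allocates with the required alignment, and notifies allocation observers.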
AllocationResult SpaceWithLinearArea::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin) {
  DCHECK(!FLAG_enable_third_party_heap);
  int max_aligned_size;
  if (!EnsureAllocation(size_in_bytes, alignment, origin, &max_aligned_size)) {
    return AllocationResult::Failure();
  }

  DCHECK_GE(max_aligned_size, size_in_bytes);
  DCHECK_LE(allocation_info_->start(), allocation_info_->top());

  int aligned_size_in_bytes;

  AllocationResult result = AllocateFastAligned(
      size_in_bytes, &aligned_size_in_bytes, alignment, origin);
  DCHECK_GE(max_aligned_size, aligned_size_in_bytes);
  DCHECK(!result.IsFailure());

  InvokeAllocationObservers(result.ToAddress(), size_in_bytes,
                            aligned_size_in_bytes, max_aligned_size);

  return result;
}

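// Dispatches the slow path to the aligned or unaligned variant depending on
// the requested alignment.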
AllocationResult SpaceWithLinearArea::AllocateRawSlow(
    int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin) {
  AllocationResult result =
      USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned
          ? AllocateRawAligned(size_in_bytes, alignment, origin)
          : AllocateRawUnaligned(size_in_bytes, origin);
  return result;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SPACES_INL_H_