// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SPACES_INL_H_
#define V8_HEAP_SPACES_INL_H_

#include "src/base/atomic-utils.h"
#include "src/base/v8-fallthrough.h"
#include "src/common/globals.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/large-spaces.h"
#include "src/heap/memory-chunk-inl.h"
#include "src/heap/new-spaces.h"
#include "src/heap/paged-spaces.h"
#include "src/heap/spaces.h"
#include "src/objects/code-inl.h"

namespace v8 {
namespace internal {

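// PageIteratorImpl walks the owning space's intrusive linked list of pages;
// pre- and post-increment both advance via next_page().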
template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
  p_ = p_->next_page();
  return *this;
}

template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
  PageIteratorImpl<PAGE_TYPE> tmp(*this);
  operator++();
  return tmp;
}

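// The range covers the page containing |start| up to and including the page
// containing |limit|; end_ is the successor of |limit|'s page, and
// FromAllocationAreaAddress handles a limit that points one past the end of
// a page's allocation area.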
PageRange::PageRange(Address start, Address limit)
    : begin_(Page::FromAddress(start)),
      end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
#ifdef DEBUG
  if (begin_->InNewSpace()) {
    SemiSpace::AssertValidRange(start, limit);
  }
#endif  // DEBUG
}

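// External backing store bytes (e.g. ArrayBuffer backing stores and external
// strings) are tracked both per space and heap-wide; CheckedIncrement /
// CheckedDecrement perform atomic updates that verify against under- and
// overflow in debug builds.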
void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  heap()->IncrementExternalBackingStoreBytes(type, amount);
}

void Space::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  heap()->DecrementExternalBackingStoreBytes(type, amount);
}

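// Moving bytes between two spaces adjusts only the per-space counters; the
// heap-wide total is unaffected by the move, so it is not touched here.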
void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                          Space* from, Space* to,
                                          size_t amount) {
  if (from == to) return;

  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
}

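// Test-only helper: flags the page so that neither the allocator nor the
// evacuator will use it, and evicts its free-list entries so no further
// allocation can land on the page. New-space pages are excluded.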
void Page::MarkNeverAllocateForTesting() {
  DCHECK(this->owner_identity() != NEW_SPACE);
  DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
  SetFlag(NEVER_ALLOCATE_ON_PAGE);
  SetFlag(NEVER_EVACUATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

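// Selects the page for compaction. The page must not yet carry old-to-old
// slot sets, and its free-list entries are evicted so the allocator stops
// handing out memory on a page that is about to be evacuated.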
void Page::MarkEvacuationCandidate() {
  DCHECK(!IsFlagSet(NEVER_EVACUATE));
  DCHECK_NULL(slot_set<OLD_TO_OLD>());
  DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  SetFlag(EVACUATION_CANDIDATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

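// Reverses MarkEvacuationCandidate and re-initializes the free-list
// categories so the page can serve allocations again. If compaction was
// aborted on this page, old-to-old slot sets may legitimately still exist.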
void Page::ClearEvacuationCandidate() {
  if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
    DCHECK_NULL(slot_set<OLD_TO_OLD>());
    DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  }
  ClearFlag(EVACUATION_CANDIDATE);
  InitializeFreeListCategories();
}

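// Visits all old-generation memory chunks in a fixed order: old space, map
// space, code space, large-object space, then code large-object space.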
OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
    : heap_(heap),
      state_(kOldSpaceState),
      old_iterator_(heap->old_space()->begin()),
      code_iterator_(heap->code_space()->begin()),
      map_iterator_(heap->map_space()->begin()),
      lo_iterator_(heap->lo_space()->begin()),
      code_lo_iterator_(heap->code_lo_space()->begin()) {}

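// A small state machine: each case drains one space's iterator and then falls
// through to the next state, returning nullptr once every space is exhausted.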
MemoryChunk* OldGenerationMemoryChunkIterator::next() {
  switch (state_) {
    case kOldSpaceState: {
      if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
      state_ = kMapState;
      V8_FALLTHROUGH;
    }
    case kMapState: {
      if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++);
      state_ = kCodeState;
      V8_FALLTHROUGH;
    }
    case kCodeState: {
      if (code_iterator_ != heap_->code_space()->end())
        return *(code_iterator_++);
      state_ = kLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kLargeObjectState: {
      if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
      state_ = kCodeLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kCodeLargeObjectState: {
      if (code_lo_iterator_ != heap_->code_lo_space()->end())
        return *(code_lo_iterator_++);
      state_ = kFinishedState;
      V8_FALLTHROUGH;
    }
    case kFinishedState:
      return nullptr;
    default:
      break;
  }
  UNREACHABLE();
}

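// Bump-pointer allocation within the buffer: if the requested alignment needs
// it, a filler object is placed in front of the returned object; a request
// that does not fit before the limit yields a retry result.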
AllocationResult LocalAllocationBuffer::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + size_in_bytes;
  if (new_top > allocation_info_.limit()) return AllocationResult::Retry();

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    return Heap::PrecedeWithFiller(ReadOnlyRoots(heap_),
                                   HeapObject::FromAddress(current_top),
                                   filler_size);
  }

  return AllocationResult(HeapObject::FromAddress(current_top));
}

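// Converts a successful allocation of |size| bytes into a buffer whose linear
// allocation area covers exactly the allocated region; a retry result yields
// an invalid buffer.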
LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                        AllocationResult result,
                                                        intptr_t size) {
  if (result.IsRetry()) return InvalidBuffer();
  HeapObject obj;
  bool ok = result.To(&obj);
  USE(ok);
  DCHECK(ok);
  Address top = HeapObject::cast(obj).address();
  return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}

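// Merging succeeds only when |other|'s allocation area ends exactly at this
// buffer's top; the unused part of |other| is then absorbed into this buffer
// and |other| is reset to an empty area.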
bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
  if (allocation_info_.top() == other->allocation_info_.limit()) {
    allocation_info_.set_top(other->allocation_info_.top());
    other->allocation_info_.Reset(kNullAddress, kNullAddress);
    return true;
  }
  return false;
}

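// Deallocation is LIFO-only: an object is freed by rewinding the top pointer,
// which works only if it was the most recent allocation in this buffer.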
bool LocalAllocationBuffer::TryFreeLast(HeapObject object, int object_size) {
  if (IsValid()) {
    const Address object_address = object.address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SPACES_INL_H_