1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_HEAP_MARK_COMPACT_INL_H_
6 #define V8_HEAP_MARK_COMPACT_INL_H_
7
8 #include "src/heap/mark-compact.h"
9 #include "src/heap/remembered-set.h"
10 #include "src/isolate.h"
11
12 namespace v8 {
13 namespace internal {
14
PushBlack(HeapObject * obj)15 void MarkCompactCollector::PushBlack(HeapObject* obj) {
16 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(obj)));
17 if (marking_deque()->Push(obj)) {
18 MemoryChunk::IncrementLiveBytesFromGC(obj, obj->Size());
19 } else {
20 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
21 Marking::BlackToGrey(mark_bit);
22 }
23 }
24
25
UnshiftBlack(HeapObject * obj)26 void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
27 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(obj)));
28 if (!marking_deque()->Unshift(obj)) {
29 MemoryChunk::IncrementLiveBytesFromGC(obj, -obj->Size());
30 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
31 Marking::BlackToGrey(mark_bit);
32 }
33 }
34
35
MarkObject(HeapObject * obj,MarkBit mark_bit)36 void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
37 DCHECK(ObjectMarking::MarkBitFrom(obj) == mark_bit);
38 if (Marking::IsWhite(mark_bit)) {
39 Marking::WhiteToBlack(mark_bit);
40 DCHECK(obj->GetIsolate()->heap()->Contains(obj));
41 PushBlack(obj);
42 }
43 }
44
45
SetMark(HeapObject * obj,MarkBit mark_bit)46 void MarkCompactCollector::SetMark(HeapObject* obj, MarkBit mark_bit) {
47 DCHECK(Marking::IsWhite(mark_bit));
48 DCHECK(ObjectMarking::MarkBitFrom(obj) == mark_bit);
49 Marking::WhiteToBlack(mark_bit);
50 MemoryChunk::IncrementLiveBytesFromGC(obj, obj->Size());
51 }
52
53
IsMarked(Object * obj)54 bool MarkCompactCollector::IsMarked(Object* obj) {
55 DCHECK(obj->IsHeapObject());
56 HeapObject* heap_object = HeapObject::cast(obj);
57 return Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(heap_object));
58 }
59
60
RecordSlot(HeapObject * object,Object ** slot,Object * target)61 void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
62 Object* target) {
63 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
64 Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
65 if (target_page->IsEvacuationCandidate() &&
66 !ShouldSkipEvacuationSlotRecording(object)) {
67 DCHECK(Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(object)));
68 RememberedSet<OLD_TO_OLD>::Insert(source_page,
69 reinterpret_cast<Address>(slot));
70 }
71 }
72
73
AddCandidate(SharedFunctionInfo * shared_info)74 void CodeFlusher::AddCandidate(SharedFunctionInfo* shared_info) {
75 if (GetNextCandidate(shared_info) == nullptr) {
76 SetNextCandidate(shared_info, shared_function_info_candidates_head_);
77 shared_function_info_candidates_head_ = shared_info;
78 }
79 }
80
81
AddCandidate(JSFunction * function)82 void CodeFlusher::AddCandidate(JSFunction* function) {
83 DCHECK(function->code() == function->shared()->code());
84 if (function->next_function_link()->IsUndefined(isolate_)) {
85 SetNextCandidate(function, jsfunction_candidates_head_);
86 jsfunction_candidates_head_ = function;
87 }
88 }
89
90
GetNextCandidateSlot(JSFunction * candidate)91 JSFunction** CodeFlusher::GetNextCandidateSlot(JSFunction* candidate) {
92 return reinterpret_cast<JSFunction**>(
93 HeapObject::RawField(candidate, JSFunction::kNextFunctionLinkOffset));
94 }
95
96
GetNextCandidate(JSFunction * candidate)97 JSFunction* CodeFlusher::GetNextCandidate(JSFunction* candidate) {
98 Object* next_candidate = candidate->next_function_link();
99 return reinterpret_cast<JSFunction*>(next_candidate);
100 }
101
102
SetNextCandidate(JSFunction * candidate,JSFunction * next_candidate)103 void CodeFlusher::SetNextCandidate(JSFunction* candidate,
104 JSFunction* next_candidate) {
105 candidate->set_next_function_link(next_candidate, UPDATE_WEAK_WRITE_BARRIER);
106 }
107
108
ClearNextCandidate(JSFunction * candidate,Object * undefined)109 void CodeFlusher::ClearNextCandidate(JSFunction* candidate, Object* undefined) {
110 DCHECK(undefined->IsUndefined(candidate->GetIsolate()));
111 candidate->set_next_function_link(undefined, SKIP_WRITE_BARRIER);
112 }
113
114
GetNextCandidate(SharedFunctionInfo * candidate)115 SharedFunctionInfo* CodeFlusher::GetNextCandidate(
116 SharedFunctionInfo* candidate) {
117 Object* next_candidate = candidate->code()->gc_metadata();
118 return reinterpret_cast<SharedFunctionInfo*>(next_candidate);
119 }
120
121
SetNextCandidate(SharedFunctionInfo * candidate,SharedFunctionInfo * next_candidate)122 void CodeFlusher::SetNextCandidate(SharedFunctionInfo* candidate,
123 SharedFunctionInfo* next_candidate) {
124 candidate->code()->set_gc_metadata(next_candidate);
125 }
126
127
ClearNextCandidate(SharedFunctionInfo * candidate)128 void CodeFlusher::ClearNextCandidate(SharedFunctionInfo* candidate) {
129 candidate->code()->set_gc_metadata(NULL, SKIP_WRITE_BARRIER);
130 }
131
132
// Returns the next live object on the chunk according to the iteration mode
// T (black objects, grey objects, or both), or nullptr when iteration is
// done. Walks the mark bitmap cell by cell: a set bit addresses an object
// start, and the bit immediately after it distinguishes black (set) from
// grey (clear). One-word fillers produced by black areas are skipped.
template <LiveObjectIterationMode T>
HeapObject* LiveObjectIterator<T>::Next() {
  while (!it_.Done()) {
    HeapObject* object = nullptr;
    // Consume set bits of the current bitmap cell until a reportable object
    // is found or the cell is exhausted.
    while (current_cell_ != 0) {
      uint32_t trailing_zeros = base::bits::CountTrailingZeros32(current_cell_);
      Address addr = cell_base_ + trailing_zeros * kPointerSize;

      // Clear the first bit of the found object.
      current_cell_ &= ~(1u << trailing_zeros);

      // The mark bit following the object's first bit tells black from grey.
      uint32_t second_bit_index = 0;
      if (trailing_zeros < Bitmap::kBitIndexMask) {
        second_bit_index = 1u << (trailing_zeros + 1);
      } else {
        // The second bit lives in the next cell.
        second_bit_index = 0x1;
        // The overlapping case; there has to exist a cell after the current
        // cell.
        // However, if there is a black area at the end of the page, and the
        // last word is a one word filler, we are not allowed to advance. In
        // that case we can return immediately.
        if (it_.Done()) {
          DCHECK(HeapObject::FromAddress(addr)->map() ==
                 HeapObject::FromAddress(addr)
                     ->GetHeap()
                     ->one_pointer_filler_map());
          return nullptr;
        }
        it_.Advance();
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }

      Map* map = nullptr;
      if (current_cell_ & second_bit_index) {
        // We found a black object. If the black object is within a black area,
        // make sure that we skip all set bits in the black area until the
        // object ends.
        HeapObject* black_object = HeapObject::FromAddress(addr);
        // Read the map word atomically; a concurrent marker may be writing it.
        map = base::NoBarrierAtomicValue<Map*>::FromAddress(addr)->Value();
        Address end = addr + black_object->SizeFromMap(map) - kPointerSize;
        // One word filler objects do not borrow the second mark bit. We have
        // to jump over the advancing and clearing part.
        // Note that we know that we are at a one word filler when
        // object_start + object_size - kPointerSize == object_start.
        if (addr != end) {
          DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
          uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
          unsigned int end_cell_index =
              end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
          MarkBit::CellType end_index_mask =
              1u << Bitmap::IndexInCell(end_mark_bit_index);
          // Jump the iterator forward to the cell containing the object's end.
          if (it_.Advance(end_cell_index)) {
            cell_base_ = it_.CurrentCellBase();
            current_cell_ = *it_.CurrentCell();
          }

          // Clear all bits in current_cell, including the end index.
          current_cell_ &= ~(end_index_mask + end_index_mask - 1);
        }

        if (T == kBlackObjects || T == kAllLiveObjects) {
          object = black_object;
        }
      } else if ((T == kGreyObjects || T == kAllLiveObjects)) {
        // Second bit clear: the object is grey.
        object = HeapObject::FromAddress(addr);
      }

      // We found a live object.
      if (object != nullptr) {
        if (map != nullptr && map == heap()->one_pointer_filler_map()) {
          // Black areas together with slack tracking may result in black one
          // word filler objects. We filter these objects out in the iterator.
          object = nullptr;
        } else {
          break;
        }
      }
    }

    // Current cell exhausted: move on to the next bitmap cell, if any.
    if (current_cell_ == 0) {
      if (!it_.Done()) {
        it_.Advance();
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }
    }
    if (object != nullptr) return object;
  }
  return nullptr;
}
224
225 } // namespace internal
226 } // namespace v8
227
228 #endif // V8_HEAP_MARK_COMPACT_INL_H_
229