// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/scavenger.h"

#include "src/contexts.h"
#include "src/heap/heap.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/scavenger-inl.h"
#include "src/isolate.h"
#include "src/log.h"

namespace v8 {
namespace internal {

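// The scavenging visitor is specialized along two axes, fixed at template
// instantiation time so that each combination gets its own dispatch table
// without runtime branches: whether incremental-marking mark bits must be
// transferred to copied objects, and whether logging/profiling hooks fire
// on object moves.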
enum LoggingAndProfiling {
  LOGGING_AND_PROFILING_ENABLED,
  LOGGING_AND_PROFILING_DISABLED
};


enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };

template <MarksHandling marks_handling,
          LoggingAndProfiling logging_and_profiling_mode>
class ScavengingVisitor : public StaticVisitorBase {
 public:
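  // Populate the dispatch table with an evacuation callback for each
  // visitor id.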
  static void Initialize() {
    table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
    table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
    table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
    table_.Register(kVisitByteArray, &EvacuateByteArray);
    table_.Register(kVisitFixedArray, &EvacuateFixedArray);
    table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
    table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
    table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
    table_.Register(kVisitJSArrayBuffer,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(
        kVisitNativeContext,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            Context::kSize>);

    table_.Register(
        kVisitConsString,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            ConsString::kSize>);

    table_.Register(
        kVisitSlicedString,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            SlicedString::kSize>);

    table_.Register(
        kVisitSymbol,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            Symbol::kSize>);

    table_.Register(
        kVisitSharedFunctionInfo,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            SharedFunctionInfo::kSize>);

    table_.Register(kVisitJSWeakCollection,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSRegExp,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSFunction, &EvacuateJSFunction);

    table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
                                   kVisitDataObject, kVisitDataObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitJSObject, kVisitJSObjectGeneric>();

    table_
        .RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                 kVisitJSApiObject, kVisitJSApiObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitStruct, kVisitStructGeneric>();
  }

  static VisitorDispatchTable<ScavengingCallback>* GetTable() {
    return &table_;
  }

 private:
  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };

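  // Record an object copy for --log-gc (and, in debug builds, --heap-stats):
  // a copy that stays within new space counts as an allocation, anything
  // else as a promotion.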
  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
    bool should_record = false;
#ifdef DEBUG
    should_record = FLAG_heap_stats;
#endif
    should_record = should_record || FLAG_log_gc;
    if (should_record) {
      if (heap->new_space()->Contains(obj)) {
        heap->new_space()->RecordAllocation(obj);
      } else {
        heap->new_space()->RecordPromotion(obj);
      }
    }
  }

  // Helper function used by CopyObject to copy a source object to an
  // allocated target object and update the forwarding pointer in the
  // source object.
  INLINE(static void MigrateObject(Heap* heap, HeapObject* source,
                                   HeapObject* target, int size)) {
    // If we migrate into to-space, then the to-space top pointer should be
    // right after the target object. Incorporate double alignment
    // over-allocation.
    DCHECK(!heap->InToSpace(target) ||
           target->address() + size == heap->new_space()->top() ||
           target->address() + size + kPointerSize == heap->new_space()->top());

    // Make sure that we do not overwrite the promotion queue which is at
    // the end of to-space.
    DCHECK(!heap->InToSpace(target) ||
           heap->promotion_queue()->IsBelowPromotionQueue(
               heap->new_space()->top()));

    // Copy the content of source to target.
    heap->CopyBlock(target->address(), source->address(), size);

    // Set the forwarding address.
    source->set_map_word(MapWord::FromForwardingAddress(target));

    if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
      // Update NewSpace stats if necessary.
      RecordCopiedObject(heap, target);
      heap->OnMoveEvent(target, source, size);
    }

    if (marks_handling == TRANSFER_MARKS) {
      if (IncrementalMarking::TransferColor(source, target, size)) {
        MemoryChunk::IncrementLiveBytesFromGC(target, size);
      }
    }
  }

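  // Attempt to copy the object into to-space. Returns false if the
  // semi-space allocation fails, e.g. due to fragmentation.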
  template <AllocationAlignment alignment>
  static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
                                         HeapObject* object, int object_size) {
    Heap* heap = map->GetHeap();

    DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
    AllocationResult allocation =
        heap->new_space()->AllocateRaw(object_size, alignment);

    HeapObject* target = NULL;  // Initialization to please compiler.
    if (allocation.To(&target)) {
      // Order is important here: Set the promotion limit before storing a
      // filler for double alignment or migrating the object. Otherwise we
      // may end up overwriting promotion queue entries when we migrate the
      // object.
      heap->promotion_queue()->SetNewLimit(heap->new_space()->top());

      MigrateObject(heap, object, target, object_size);

      // Update slot to new target.
      *slot = target;

      heap->IncrementSemiSpaceCopiedObjectSize(object_size);
      return true;
    }
    return false;
  }


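  // Attempt to move the object into the old generation. On success, objects
  // that may contain pointers are pushed onto the promotion queue so their
  // slots can be iterated later.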
  template <ObjectContents object_contents, AllocationAlignment alignment>
  static inline bool PromoteObject(Map* map, HeapObject** slot,
                                   HeapObject* object, int object_size) {
    Heap* heap = map->GetHeap();

    AllocationResult allocation =
        heap->old_space()->AllocateRaw(object_size, alignment);

    HeapObject* target = NULL;  // Initialization to please compiler.
    if (allocation.To(&target)) {
      MigrateObject(heap, object, target, object_size);

      // Update slot to new target using CAS. A concurrent sweeper thread may
      // filter the slot concurrently.
      HeapObject* old = *slot;
      base::Release_CompareAndSwap(reinterpret_cast<base::AtomicWord*>(slot),
                                   reinterpret_cast<base::AtomicWord>(old),
                                   reinterpret_cast<base::AtomicWord>(target));

      if (object_contents == POINTER_OBJECT) {
        heap->promotion_queue()->insert(
            target, object_size,
            Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
      }
      heap->IncrementPromotedObjectsSize(object_size);
      return true;
    }
    return false;
  }

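  // Evacuation policy: try a semi-space copy first unless
  // Heap::ShouldBePromoted says otherwise, then promotion; whichever
  // strategy is preferred, the other serves as a fallback before giving up.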
  template <ObjectContents object_contents, AllocationAlignment alignment>
  static inline void EvacuateObject(Map* map, HeapObject** slot,
                                    HeapObject* object, int object_size) {
    SLOW_DCHECK(object_size <= Page::kAllocatableMemory);
    SLOW_DCHECK(object->Size() == object_size);
    Heap* heap = map->GetHeap();

    if (!heap->ShouldBePromoted(object->address(), object_size)) {
      // A semi-space copy may fail due to fragmentation. In that case, we
      // try to promote the object.
      if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
        return;
      }
    }

    if (PromoteObject<object_contents, alignment>(map, slot, object,
                                                  object_size)) {
      return;
    }

    // If promotion failed, we try to copy the object to the other semi-space.
    if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;

    FatalProcessOutOfMemory("Scavenger: semi-space copy\n");
  }

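  // JSFunctions need extra care: when transferring marks, the code entry
  // slot of an already-black target must be re-recorded for the compactor.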
  static inline void EvacuateJSFunction(Map* map, HeapObject** slot,
                                        HeapObject* object) {
    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);

    if (marks_handling == IGNORE_MARKS) return;

    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    HeapObject* target = map_word.ToForwardingAddress();

    MarkBit mark_bit = ObjectMarking::MarkBitFrom(target);
    if (Marking::IsBlack(mark_bit)) {
      // This object is black and might not be rescanned by the marker.
      // We should explicitly record the code entry slot for compaction
      // because promotion queue processing (IteratePromotedObjectPointers)
      // will miss it as it is not HeapObject-tagged.
      Address code_entry_slot =
          target->address() + JSFunction::kCodeEntryOffset;
      Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
      map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot(
          target, code_entry_slot, code);
    }
  }

  static inline void EvacuateFixedArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
    int length = reinterpret_cast<FixedArray*>(object)->synchronized_length();
    int object_size = FixedArray::SizeFor(length);
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }

  static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    int object_size = FixedDoubleArray::SizeFor(length);
    EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
  }

  static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot,
                                             HeapObject* object) {
    int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size();
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }

  static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot,
                                               HeapObject* object) {
    int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
    EvacuateObject<POINTER_OBJECT, kDoubleAligned>(map, slot, object,
                                                   object_size);
  }

  static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                       HeapObject* object) {
    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }

  static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int object_size = SeqOneByteString::cast(object)
                          ->SeqOneByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }

  static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int object_size = SeqTwoByteString::cast(object)
                          ->SeqTwoByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }

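  // A shortcut candidate is a cons string whose second part is the empty
  // string; instead of copying it, forward the slot directly to its first
  // part. The shortcut is only taken when mark bits are ignored; while
  // compacting, kVisitShortcutCandidate is remapped to the plain cons-string
  // visitor (see SelectScavengingVisitorsTable).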
  static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
                                               HeapObject* object) {
    DCHECK(IsShortcutCandidate(map->instance_type()));

    Heap* heap = map->GetHeap();

    if (marks_handling == IGNORE_MARKS &&
        ConsString::cast(object)->unchecked_second() == heap->empty_string()) {
      HeapObject* first =
          HeapObject::cast(ConsString::cast(object)->unchecked_first());

      *slot = first;

      if (!heap->InNewSpace(first)) {
        object->set_map_word(MapWord::FromForwardingAddress(first));
        return;
      }

      MapWord first_word = first->map_word();
      if (first_word.IsForwardingAddress()) {
        HeapObject* target = first_word.ToForwardingAddress();

        *slot = target;
        object->set_map_word(MapWord::FromForwardingAddress(target));
        return;
      }

      Scavenger::ScavengeObjectSlow(slot, first);
      object->set_map_word(MapWord::FromForwardingAddress(*slot));
      return;
    }

    int object_size = ConsString::kSize;
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }

  template <ObjectContents object_contents>
  class ObjectEvacuationStrategy {
   public:
    template <int object_size>
    static inline void VisitSpecialized(Map* map, HeapObject** slot,
                                        HeapObject* object) {
      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
                                                    object_size);
    }

    static inline void Visit(Map* map, HeapObject** slot, HeapObject* object) {
      int object_size = map->instance_size();
      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
                                                    object_size);
    }
  };

  static VisitorDispatchTable<ScavengingCallback> table_;
};

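// Out-of-line definition of the static dispatch table, one per template
// instantiation.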
template <MarksHandling marks_handling,
          LoggingAndProfiling logging_and_profiling_mode>
VisitorDispatchTable<ScavengingCallback>
    ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;

// static
void Scavenger::Initialize() {
  ScavengingVisitor<TRANSFER_MARKS,
                    LOGGING_AND_PROFILING_DISABLED>::Initialize();
  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
  ScavengingVisitor<TRANSFER_MARKS,
                    LOGGING_AND_PROFILING_ENABLED>::Initialize();
  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
}


// static
void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
  SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
  MapWord first_word = object->map_word();
  SLOW_DCHECK(!first_word.IsForwardingAddress());
  Map* map = first_word.ToMap();
  Scavenger* scavenger = map->GetHeap()->scavenge_collector_;
  scavenger->scavenging_visitors_table_.GetVisitor(map)(map, p, object);
}


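// Pick the visitor table matching the current marking and logging state;
// the four ScavengingVisitor variants were set up in Scavenger::Initialize.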
void Scavenger::SelectScavengingVisitorsTable() {
  bool logging_and_profiling =
      FLAG_verify_predictable || isolate()->logger()->is_logging() ||
      isolate()->is_profiling() ||
      (isolate()->heap_profiler() != NULL &&
       isolate()->heap_profiler()->is_tracking_object_moves());

  if (!heap()->incremental_marking()->IsMarking()) {
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<IGNORE_MARKS,
                            LOGGING_AND_PROFILING_DISABLED>::GetTable());
    } else {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<IGNORE_MARKS,
                            LOGGING_AND_PROFILING_ENABLED>::GetTable());
    }
  } else {
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<TRANSFER_MARKS,
                            LOGGING_AND_PROFILING_DISABLED>::GetTable());
    } else {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<TRANSFER_MARKS,
                            LOGGING_AND_PROFILING_ENABLED>::GetTable());
    }

    if (heap()->incremental_marking()->IsCompacting()) {
      // When compacting, forbid short-circuiting of cons strings.
      // Scavenging code relies on the fact that a new space object
      // cannot be evacuated into an evacuation candidate, and
      // short-circuiting would violate this assumption.
      scavenging_visitors_table_.Register(
          StaticVisitorBase::kVisitShortcutCandidate,
          scavenging_visitors_table_.GetVisitorById(
              StaticVisitorBase::kVisitConsString));
    }
  }
}


Isolate* Scavenger::isolate() { return heap()->isolate(); }


void ScavengeVisitor::VisitPointer(Object** p) { ScavengePointer(p); }


void ScavengeVisitor::VisitPointers(Object** start, Object** end) {
  // Copy all HeapObject pointers in [start, end)
  for (Object** p = start; p < end; p++) ScavengePointer(p);
}


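// Scavenge a single slot: only pointers into new space are of interest;
// everything else is left untouched.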
void ScavengeVisitor::ScavengePointer(Object** p) {
  Object* object = *p;
  if (!heap_->InNewSpace(object)) return;

  Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                            reinterpret_cast<HeapObject*>(object));
}

}  // namespace internal
}  // namespace v8