// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

#include "src/heap/array-buffer-tracker.h"
#include "src/heap/objects-visiting.h"
#include "src/ic/ic-state.h"
#include "src/macro-assembler.h"
#include "src/objects-body-descriptors-inl.h"

namespace v8 {
namespace internal {

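// Returns the visitor callback registered for the visitor id stored in the
// given map.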
template <typename Callback>
Callback VisitorDispatchTable<Callback>::GetVisitor(Map* map) {
  return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
}

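// Fills the dispatch table used during scavenges (new-space GC) with the
// visitor callback appropriate for each visitor id.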
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);
  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  // Don't visit the code entry. We use this visitor only during scavenges.
  table_.Register(
      kVisitJSFunction,
      &FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                           int>::Visit);

  table_.Register(
      kVisitJSArrayBuffer,
      &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                           int>::Visit);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  // We do not use the specialized API object visitor for new space.
  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSApiObject,
                                          kVisitJSApiObjectGeneric>();

  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}

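// Visits the tagged fields of a BytecodeArray (from the constant pool up to,
// but not including, the raw-data frame size field) and returns the array's
// size so iteration can advance to the next object.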
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kFrameSizeOffset));
  return reinterpret_cast<BytecodeArray*>(object)->BytecodeArraySize();
}

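// Fills the dispatch table used by the mark-compact collector's marking
// visitor with the callback appropriate for each visitor id.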
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(
      kVisitJSArrayBuffer,
      &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                           void>::Visit);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.Register(kVisitTransitionArray, &VisitTransitionArray);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<JSApiObjectVisitor, kVisitJSApiObject,
                                          kVisitJSApiObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}

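// Records the slot holding a code entry and marks the referenced Code object.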
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, HeapObject* object, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
                                                      code);
  StaticVisitor::MarkObject(heap, code);
}

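// Records the relocation slot of an object embedded in code and marks the
// object, unless the host code treats the reference as weak.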
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and to record slots for weakly embedded objects during the
  // clearing of non-live references in mark-compact.
  if (!host->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}

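// Records the relocation slot of a cell referenced from code and marks the
// cell, unless the host code treats the reference as weak.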
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, cell);
  if (!host->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}

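// Records and marks the code target of a patched debug break slot.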
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
         rinfo->IsPatchedDebugBreakSlotSequence());
  Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

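// Records the relocation slot of a code target and marks the target, clearing
// stale inline caches first when FLAG_cleanup_code_caches_at_gc requires it.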
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      (heap->isolate()->serializer_enabled() ||
       target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

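// Records the relocation slot of a code age sequence and marks its code age
// stub.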
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);
}

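// Visits a Map. Maps that can transition are marked through specially so
// that their transition and back-pointer links stay weak; all other maps
// have their pointer fields visited strongly.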
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clear the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled, we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, object,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}

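// Visits a WeakCell. A cell whose value is already known to be live is
// processed immediately by recording its value slot; otherwise the cell is
// queued on the heap's list of encountered weak cells for later processing.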
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  // Enqueue the weak cell in the linked list of encountered weak cells.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next_cleared() && !weak_cell->cleared()) {
    HeapObject* value = HeapObject::cast(weak_cell->value());
    if (MarkCompactCollector::IsMarked(value)) {
      // Weak cells with live values are directly processed here to reduce
      // the processing time of weak cells during the main GC pause.
      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
      map->GetHeap()->mark_compact_collector()->RecordSlot(weak_cell, slot,
                                                           *slot);
    } else {
      // If we do not know about the liveness of the weak cell's value, we
      // have to process it when we know the liveness of the whole transitive
      // closure.
      weak_cell->set_next(heap->encountered_weak_cells(),
                          UPDATE_WEAK_WRITE_BARRIER);
      heap->set_encountered_weak_cells(weak_cell);
    }
  }
}

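// Visits a TransitionArray: the prototype transitions and transition keys
// are visited strongly, and the array itself is queued on the heap's list
// of encountered transition arrays for weak processing.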
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitTransitionArray(
    Map* map, HeapObject* object) {
  TransitionArray* array = TransitionArray::cast(object);
  Heap* heap = array->GetHeap();
  // Visit strong references.
  if (array->HasPrototypeTransitions()) {
    StaticVisitor::VisitPointer(heap, array,
                                array->GetPrototypeTransitionsSlot());
  }
  int num_transitions = TransitionArray::NumberOfTransitions(array);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, array, array->GetKeySlot(i));
  }
  // Enqueue the array in the linked list of encountered transition arrays
  // if it is not already in the list.
  if (array->next_link()->IsUndefined(heap->isolate())) {
    array->set_next_link(heap->encountered_transition_arrays(),
                         UPDATE_WEAK_WRITE_BARRIER);
    heap->set_encountered_transition_arrays(array);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSWeakCollection::BodyDescriptorWeak,
                              void> JSWeakCollectionBodyVisitor;
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  JSWeakCollectionBodyVisitor::Visit(map, object);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(object, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}

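// Visits a Code object, first aging it when code aging is enabled and the
// heap is not about to be serialized.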
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, Code::BodyDescriptor, void>
      CodeBodyVisitor;
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  CodeBodyVisitor::Visit(map, object);
}

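// Visits a SharedFunctionInfo, resetting its IC age if needed. If code
// flushing is enabled and the code looks flushable, the object becomes a
// flushing candidate and its code reference is visited weakly; otherwise
// the code reference is visited strongly.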
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_flush_optimized_code_cache) {
    if (!shared->OptimizedCodeMapIsCleared()) {
      // Always flush the optimized code map if requested by flag.
      shared->ClearOptimizedCodeMap();
    }
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}

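// Visits a JSFunction. If code flushing is enabled and the function is
// flushable, it becomes a flushing candidate and its code reference is
// visited weakly; otherwise the unoptimized code is marked to keep it from
// being flushed and the code reference is visited strongly.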
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  if (FLAG_cleanup_code_caches_at_gc) {
    function->ClearTypeFeedbackInfoAtGCTime();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(map, object);
      return;
    } else {
      // Visit the unoptimized code object to prevent it from being flushed.
      StaticVisitor::MarkObject(heap, function->shared()->code());
    }
  }
  VisitJSFunctionStrongCode(map, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  JSObjectVisitor::Visit(map, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kFrameSizeOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a non-empty
  // descriptor array is marked, its header is also visited. The slot holding
  // the descriptor array will be implicitly recorded when the pointer fields of
  // this map are visited. Prototype maps don't keep track of transitions, so
  // just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
        descriptors->length() > 0) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetFirstElementAddress(),
                                   descriptors->GetDescriptorEndSlot(0));
    }
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetDescriptorStartSlot(start),
                                   descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}

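// Returns true if the SharedFunctionInfo's script and the script's source
// are both available.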
inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}

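// Returns true if the given function's code may be flushed: the code must
// not be marked, the function must not be optimized, the code must be old
// enough when code aging is enabled, and the SharedFunctionInfo checks
// below must pass.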
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // The function must be compiled and have the source code available, so
  // that it can be recompiled in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  if (shared_info->IsApiFunction()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must be lazily compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator or async functions, because
  // we don't know whether there are still live activations (generator
  // objects) on the heap.
  if (shared_info->is_resumable()) {
    return false;
  }

  // If this is a full script wrapped in a function, we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // The function must not be a builtin.
  if (shared_info->IsBuiltin()) {
    return false;
  }

  // Maintain debug break slots in the code.
  if (shared_info->HasDebugCode()) {
    return false;
  }

  // If this is a function initialized with %SetCode, then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled, we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}

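// Visits a SharedFunctionInfo while treating its code reference weakly: the
// name field is visited, kCodeOffset is skipped, and visiting resumes at the
// optimized code map.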
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, object, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSFunction::BodyDescriptorStrongCode,
                              void> JSFunctionStrongCodeBodyVisitor;
  JSFunctionStrongCodeBodyVisitor::Visit(map, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                              void> JSFunctionWeakCodeBodyVisitor;
  JSFunctionWeakCodeBodyVisitor::Visit(map, object);
}


}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_VISITING_INL_H_