// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

#include "src/heap/array-buffer-tracker.h"
#include "src/heap/mark-compact.h"
#include "src/heap/objects-visiting.h"
#include "src/ic/ic-state.h"
#include "src/macro-assembler.h"
#include "src/objects-body-descriptors-inl.h"

namespace v8 {
namespace internal {


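// Returns the callback registered for the visitor id stored in the given map.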
template <typename Callback>
Callback VisitorDispatchTable<Callback>::GetVisitor(Map* map) {
  return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
}


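// Populates the dispatch table used during scavenges with visitors for all
// object types that can occur in new space.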
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitThinString,
      &FixedBodyVisitor<StaticVisitor, ThinString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);

  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  // Don't visit the code entry. This visitor is only used during scavenges.
  table_.Register(
      kVisitJSFunction,
      &FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                           int>::Visit);

  table_.Register(
      kVisitJSArrayBuffer,
      &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                           int>::Visit);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(
      kVisitJSWeakCollection,
      &FlexibleBodyVisitor<StaticVisitor, JSWeakCollection::BodyDescriptor,
                           int>::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  // No specialized API object visitor is used for new space.
  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSApiObject,
                                          kVisitJSApiObjectGeneric>();

  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();

  // BytecodeArrays and SharedFunctionInfos are not expected in new space, so
  // route them to the UnreachableVisitor. Note that this overrides the
  // kVisitSharedFunctionInfo registration above.
  table_.Register(kVisitBytecodeArray, &UnreachableVisitor);
  table_.Register(kVisitSharedFunctionInfo, &UnreachableVisitor);
}

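// Populates the dispatch table used when marking the heap with visitors for
// all object types.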
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitThinString,
                  &FixedBodyVisitor<StaticVisitor, ThinString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(
      kVisitAllocationSite,
      &FixedBodyVisitor<StaticVisitor, AllocationSite::MarkingBodyDescriptor,
                        void>::Visit);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(
      kVisitJSArrayBuffer,
      &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                           void>::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell,
                  &FixedBodyVisitor<StaticVisitor, PropertyCell::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.Register(kVisitTransitionArray, &VisitTransitionArray);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<JSApiObjectVisitor, kVisitJSApiObject,
                                          kVisitJSApiObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


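// Records the slot holding a code entry and marks the target code object.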
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, HeapObject* object, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
                                                      code);
  StaticVisitor::MarkObject(heap, code);
}


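// Records the relocation slot of an object embedded in code and marks the
// object unless the host code holds it weakly.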
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and to record slots for weakly embedded objects during the
  // clearing of non-live references in mark-compact.
  if (!host->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


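// Records the relocation slot of a cell referenced from code and marks the
// cell unless the host code holds it weakly.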
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, cell);
  if (!host->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


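// Records the slot of a patched debug break target and marks the target code
// object.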
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
         rinfo->IsPatchedDebugBreakSlotSequence());
  Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


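// Records the relocation slot of a code target and marks the target code
// object.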
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

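// Records the slot of a code age sequence and marks the code age stub it
// points to.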
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

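// Visits the body of a BytecodeArray and advances its age counter.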
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, BytecodeArray::MarkingBodyDescriptor,
                   void>::Visit(map, object);
  BytecodeArray::cast(object)->MakeOlder();
}

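// Visits a native context using the mark-compact body descriptor.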
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, object,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  // Enqueue the weak cell in the linked list of encountered weak cells.
  // Weak cells with cleared values can be ignored because they will always
  // contain smi zero.
  if (weak_cell->next_cleared() && !weak_cell->cleared()) {
    HeapObject* value = HeapObject::cast(weak_cell->value());
    if (ObjectMarking::IsBlackOrGrey(value)) {
      // Weak cells with live values are directly processed here to reduce
      // the processing time of weak cells during the main GC pause.
      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
      heap->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
    } else {
      // If we do not know about the liveness of the value of a weak cell, we
      // have to process it when we know the liveness of the whole transitive
      // closure.
      weak_cell->set_next(heap->encountered_weak_cells(),
                          UPDATE_WEAK_WRITE_BARRIER);
      heap->set_encountered_weak_cells(weak_cell);
    }
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitTransitionArray(
    Map* map, HeapObject* object) {
  TransitionArray* array = TransitionArray::cast(object);
  Heap* heap = array->GetHeap();
  // Visit strong references.
  if (array->HasPrototypeTransitions()) {
    StaticVisitor::VisitPointer(heap, array,
                                array->GetPrototypeTransitionsSlot());
  }
  int num_transitions = TransitionArray::NumberOfTransitions(array);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, array, array->GetKeySlot(i));
  }
  // Enqueue the array in the linked list of encountered transition arrays if
  // it is not already in the list.
  if (array->next_link()->IsUndefined(heap->isolate())) {
    array->set_next_link(heap->encountered_transition_arrays(),
                         UPDATE_WEAK_WRITE_BARRIER);
    heap->set_encountered_transition_arrays(array);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSWeakCollection::BodyDescriptorWeak,
                              void> JSWeakCollectionBodyVisitor;
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  JSWeakCollectionBodyVisitor::Visit(map, object);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(object, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


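// Visits a code object's body, aging the code first when code aging is
// enabled.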
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, Code::BodyDescriptor, void>
      CodeBodyVisitor;
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder();
  }
  CodeBodyVisitor::Visit(map, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(map, object);
      return;
    }
  }
  VisitSharedFunctionInfoStrongCode(map, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(map, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
    }
  }
  VisitJSFunctionStrongCode(map, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The
  // slot holding the descriptor array will be implicitly recorded when the
  // pointer fields of this map are visited. Prototype maps don't keep track
  // of transitions, so just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
        descriptors->length() > 0) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetFirstElementAddress(),
                                   descriptors->GetDescriptorEndSlot(0));
    }
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetDescriptorStartSlot(start),
                                   descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


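// Returns true if the SharedFunctionInfo has a script with available source,
// which is required to recompile the function after its code is flushed.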
inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


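// Returns true if the code of the given JSFunction is a candidate for
// flushing; the checks on the underlying SharedFunctionInfo are handled by
// the overload below.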
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on the stack, in the compilation cache, or referenced by
  // an optimized version of the function.
  if (ObjectMarking::IsBlackOrGrey(function->code())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code; only old code is flushed.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on the stack, in the compilation cache, or referenced by
  // an optimized version of the function.
  if (ObjectMarking::IsBlackOrGrey(shared_info->code())) {
    return false;
  }

  // The function must be compiled and have the source code available, to be
  // able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  if (shared_info->IsApiFunction()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must be lazily compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator or async functions, because we
  // don't know if there are still live activations (generator objects) on
  // the heap.
  if (IsResumableFunction(shared_info->kind())) {
    return false;
  }

  // If this is a full script wrapped in a function, we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // The function must be user code.
  if (!shared_info->IsUserJavaScript()) {
    return false;
  }

  // Maintain debug break slots in the code.
  if (shared_info->HasDebugCode()) {
    return false;
  }

  // If this is a function initialized with %SetCode, the one-to-one relation
  // between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled, we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}

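// Visits a SharedFunctionInfo with a strong reference to its code object.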
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                   void>::Visit(map, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Map* map, HeapObject* object) {
  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset <
                SharedFunctionInfo::BodyDescriptorWeakCode::kStartOffset);
  FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptorWeakCode,
                   void>::Visit(map, object);
}

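// Visits a JSFunction with a strong reference to its code object.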
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSFunction::BodyDescriptorStrongCode,
                              void> JSFunctionStrongCodeBodyVisitor;
  JSFunctionStrongCodeBodyVisitor::Visit(map, object);
}


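// Visits a JSFunction while treating the reference to its code object weakly.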
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                              void> JSFunctionWeakCodeBodyVisitor;
  JSFunctionWeakCodeBodyVisitor::Visit(map, object);
}


}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_VISITING_INL_H_