// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/objects-visiting.h"

#include "src/heap/mark-compact-inl.h"
#include "src/heap/objects-visiting-inl.h"

namespace v8 {
namespace internal {


StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(Map* map) {
  return GetVisitorId(map->instance_type(), map->instance_size(),
                      FLAG_unbox_double_fields && !map->HasFastPointerLayout());
}


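// Maps an instance type (together with the instance size and whether the
// object may contain unboxed double fields) to the VisitorId used to pick a
// visitor for objects of that shape. Strings dispatch on their representation
// and encoding; data-like and struct-like objects go through
// GetVisitorIdForSize, whose naming indicates that small fixed-size objects
// get size-specialized visitors while larger ones fall back to the generic
// variant.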
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type, int instance_size, bool has_unboxed_fields) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                   instance_size, has_unboxed_fields);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case BYTECODE_ARRAY_TYPE:
      return kVisitBytecodeArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case TRANSITION_ARRAY_TYPE:
      return kVisitTransitionArray;

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_TYPED_ARRAY_TYPE:
    case JS_DATA_VIEW_TYPE:
    case JS_SET_TYPE:
    case JS_MAP_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
    case JS_ITERATOR_RESULT_TYPE:
    case JS_PROMISE_TYPE:
    case JS_BOUND_FUNCTION_TYPE:
      return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case FILLER_TYPE:
      if (instance_size == kPointerSize) return kVisitDataObjectGeneric;
    // Fall through.
    case FOREIGN_TYPE:
    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
    case SIMD128_VALUE_TYPE:
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}


// We don't record weak slots during marking or scavenges. Instead we do it
// once when we complete a mark-compact cycle. Note that the write barrier has
// no effect if we are already in the middle of a compacting mark-sweep cycle,
// so we have to record slots manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}


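// WeakListVisitor<T> describes how a weak list of T objects is threaded
// through the heap. Each specialization below provides:
//   SetWeakNext(T*, Object*)  -- store the next link on an element,
//   WeakNext(T*)              -- load the next link,
//   WeakNextOffset()          -- field offset of the link, for slot recording,
//   VisitLiveObject(...)      -- hook invoked for elements that are retained,
//   VisitPhantomObject(...)   -- hook invoked for elements that are dropped.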
template <class T>
struct WeakListVisitor;


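// Walks the weak list starting at |list|, asking |retainer| which elements to
// keep. Retained elements are relinked into a new list (skipping dropped
// intermediate elements), and the rewritten next-links are recorded with the
// mark-compact collector when slot recording is required. Returns the new
// list head, or undefined if nothing was retained.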
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);

  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);

    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(tail, next_slot, retained);
        }
      }
      // Retained object is new tail.
      DCHECK(!retained->IsUndefined());
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object, visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);

    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if there are one or more elements.
  if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, undefined);
  return head;
}

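// Illustrative sketch only: the typical caller lives in heap.cc, where a weak
// list rooted at the heap is rewritten after a GC roughly like this (the
// function and accessor names here are from memory and may differ in this
// revision):
//
//   void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
//     Object* head =
//         VisitWeakList<Context>(this, native_contexts_list(), retainer);
//     // Update the head of the native context list kept by the heap.
//     set_native_contexts_list(head);
//   }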
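// Severs every next-link in the given weak list without consulting a
// retainer. Used below when a context becomes phantom and the lists hanging
// off it are dropped.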
template <class T>
static void ClearWeakList(Heap* heap, Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}


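// JSFunctions on a weak list are chained through their next_function_link
// field; live and phantom elements need no extra processing.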
template <>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};


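// Code objects are chained through their next_code_link field; again there is
// nothing extra to do for live or phantom elements.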
template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Code* code) { return code->next_code_link(); }

  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};


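// Native contexts are chained through the NEXT_CONTEXT_LINK slot. A live
// context additionally owns three nested weak lists (optimized functions,
// optimized code, and deoptimized code) that are processed here as well.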
template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->get(Context::NEXT_CONTEXT_LINK);
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);

    if (heap->gc_state() == Heap::MARK_COMPACT) {
      // Record the slots of the weak entries in the native context.
      MarkCompactCollector* collector = heap->mark_compact_collector();
      for (int idx = Context::FIRST_WEAK_SLOT;
           idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
        Object** slot = Context::cast(context)->RawFieldOfElementAt(idx);
        collector->RecordSlot(context, slot, *slot);
      }
      // Code objects are always allocated in Code space, so we do not have to
      // visit them during scavenges.
      DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
      DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
    }
  }

  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(context, head_slot, list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};


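// AllocationSites are chained through their weak_next field; live and phantom
// elements need no extra processing.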
template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};


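// Explicit instantiations for the two list types whose heads appear to be
// rooted directly in the heap and traversed from other translation units;
// the JSFunction and Code lists are reached through WeakListVisitor<Context>
// above.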
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer);

template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer);
}  // namespace internal
}  // namespace v8