// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/objects-visiting.h"

#include "src/heap/mark-compact-inl.h"
#include "src/heap/objects-visiting-inl.h"

namespace v8 {
namespace internal {


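// Derives the visitor id for |map| from its instance type and instance size.
// Double fields count as unboxed only when FLAG_unbox_double_fields is on and
// the map does not use a fast (all-tagged) pointer layout.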
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(Map* map) {
  return GetVisitorId(map->instance_type(), map->instance_size(),
                      FLAG_unbox_double_fields && !map->HasFastPointerLayout());
}


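// Maps an (instance type, instance size, unboxed-fields) triple to a visitor
// id. Strings dispatch on their representation and encoding; every other
// object dispatches on its exact instance type, with variable-sized cases
// picking a size-specialized or generic visitor via GetVisitorIdForSize.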
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type, int instance_size, bool has_unboxed_fields) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                   instance_size, has_unboxed_fields);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case BYTECODE_ARRAY_TYPE:
      return kVisitBytecodeArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case TRANSITION_ARRAY_TYPE:
      return kVisitTransitionArray;

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_OBJECT_TYPE:
    case JS_ERROR_TYPE:
    case JS_ARGUMENTS_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_TYPED_ARRAY_TYPE:
    case JS_DATA_VIEW_TYPE:
    case JS_SET_TYPE:
    case JS_MAP_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
    case JS_PROMISE_TYPE:
    case JS_BOUND_FUNCTION_TYPE:
      return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
                                 instance_size, has_unboxed_fields);
    case JS_API_OBJECT_TYPE:
    case JS_SPECIAL_API_OBJECT_TYPE:
      return GetVisitorIdForSize(kVisitJSApiObject, kVisitJSApiObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case FILLER_TYPE:
      if (instance_size == kPointerSize) return kVisitDataObjectGeneric;
    // Fall through.
    case FOREIGN_TYPE:
    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
    case SIMD128_VALUE_TYPE:
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}


// We don't record weak slots during marking or scavenges. Instead we do it
// once when the mark-compact cycle completes. Note that the write barrier
// has no effect if we are already in the middle of a compacting mark-sweep
// cycle, so we have to record slots manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}


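// WeakListVisitor<T> describes how objects of type T are linked into a weak
// list. Every specialization below provides the same static interface:
//   static void SetWeakNext(T* obj, Object* next);
//   static Object* WeakNext(T* obj);
//   static int WeakNextOffset();
//   static void VisitLiveObject(Heap*, T*, WeakObjectRetainer*);
//   static void VisitPhantomObject(Heap*, T*);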
template <class T>
struct WeakListVisitor;


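// Walks the weak list headed by |list|, asking |retainer| whether to retain
// each element. Dead elements are dropped; live elements are relinked, and
// their weak-next slots are recorded when the collector is compacting.
// Returns the new head of the list.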
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);

  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);

    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(tail, next_slot, retained);
        }
      }
      // The retained object is the new tail.
      DCHECK(!retained->IsUndefined(heap->isolate()));
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object, visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);

    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if it has one or more elements.
  if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, undefined);
  return head;
}


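// Resets the weak-next field of every element in the list headed by |list|
// to undefined, effectively clearing the whole list.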
template <class T>
static void ClearWeakList(Heap* heap, Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}


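// JSFunctions are chained through their next_function_link field; no extra
// work is needed for live or dead functions.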
template <>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};


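// Code objects are chained through their next_code_link field.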
template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Code* code) { return code->next_code_link(); }

  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};


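// Native contexts are chained through the NEXT_CONTEXT_LINK slot. A live
// context also owns weak lists of optimized functions and of optimized and
// deoptimized code; these are pruned in VisitLiveObject below.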
template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->next_context_link();
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);

    if (heap->gc_state() == Heap::MARK_COMPACT) {
      // Record the slots of the weak entries in the native context.
      MarkCompactCollector* collector = heap->mark_compact_collector();
      for (int idx = Context::FIRST_WEAK_SLOT;
           idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
        Object** slot = Context::cast(context)->RawFieldOfElementAt(idx);
        collector->RecordSlot(context, slot, *slot);
      }
      // Code objects are always allocated in Code space; we do not have to
      // visit them during scavenges.
      DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
      DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
    }
  }

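  // Prunes the weak list stored at |index| in the context, writes back the
  // new head, and records the head slot when the collector is compacting.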
  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(context, head_slot, list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};


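// AllocationSites are chained through their weak_next field; no extra work
// is needed for live or dead sites.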
template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};


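// Explicit instantiations of VisitWeakList for contexts and allocation sites.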
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer);

template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer);
}  // namespace internal
}  // namespace v8