• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/objects-visiting.h"

#include "src/heap/heap-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/objects-visiting-inl.h"

11 namespace v8 {
12 namespace internal {
13 
14 // We don't record weak slots during marking or scavenges. Instead we do it
15 // once when we complete mark-compact cycle.  Note that write barrier has no
16 // effect if we are already in the middle of compacting mark-sweep cycle and we
17 // have to record slots manually.
MustRecordSlots(Heap * heap)18 static bool MustRecordSlots(Heap* heap) {
19   return heap->gc_state() == Heap::MARK_COMPACT &&
20          heap->mark_compact_collector()->is_compacting();
21 }
22 
23 
// Traits class specialized below for each type that participates in a weak
// list; supplies accessors for the type's weak "next" link.
template <class T>
struct WeakListVisitor;

27 template <class T>
VisitWeakList(Heap * heap,Object list,WeakObjectRetainer * retainer)28 Object VisitWeakList(Heap* heap, Object list, WeakObjectRetainer* retainer) {
29   HeapObject undefined = ReadOnlyRoots(heap).undefined_value();
30   Object head = undefined;
31   T tail;
32   bool record_slots = MustRecordSlots(heap);
33 
34   while (list != undefined) {
35     // Check whether to keep the candidate in the list.
36     T candidate = T::cast(list);
37 
38     Object retained = retainer->RetainAs(list);
39 
40     // Move to the next element before the WeakNext is cleared.
41     list = WeakListVisitor<T>::WeakNext(candidate);
42 
43     if (retained != Object()) {
44       if (head == undefined) {
45         // First element in the list.
46         head = retained;
47       } else {
48         // Subsequent elements in the list.
49         DCHECK(!tail.is_null());
50         WeakListVisitor<T>::SetWeakNext(tail, HeapObject::cast(retained));
51         if (record_slots) {
52           HeapObject slot_holder = WeakListVisitor<T>::WeakNextHolder(tail);
53           int slot_offset = WeakListVisitor<T>::WeakNextOffset();
54           ObjectSlot slot = slot_holder.RawField(slot_offset);
55           MarkCompactCollector::RecordSlot(slot_holder, slot,
56                                            HeapObject::cast(retained));
57         }
58       }
59       // Retained object is new tail.
60       DCHECK(!retained.IsUndefined(heap->isolate()));
61       candidate = T::cast(retained);
62       tail = candidate;
63 
64       // tail is a live object, visit it.
65       WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
66 
67     } else {
68       WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
69     }
70   }
71 
72   // Terminate the list if there is one or more elements.
73   if (!tail.is_null()) WeakListVisitor<T>::SetWeakNext(tail, undefined);
74   return head;
75 }
76 
77 template <class T>
ClearWeakList(Heap * heap,Object list)78 static void ClearWeakList(Heap* heap, Object list) {
79   Object undefined = ReadOnlyRoots(heap).undefined_value();
80   while (list != undefined) {
81     T candidate = T::cast(list);
82     list = WeakListVisitor<T>::WeakNext(candidate);
83     WeakListVisitor<T>::SetWeakNext(candidate, undefined);
84   }
85 }
86 
87 template <>
88 struct WeakListVisitor<CodeT> {
SetWeakNextv8::internal::WeakListVisitor89   static void SetWeakNext(CodeT code, Object next) {
90     CodeDataContainerFromCodeT(code).set_next_code_link(
91         next, UPDATE_WEAK_WRITE_BARRIER);
92   }
93 
WeakNextv8::internal::WeakListVisitor94   static Object WeakNext(CodeT code) {
95     return CodeDataContainerFromCodeT(code).next_code_link();
96   }
97 
WeakNextHolderv8::internal::WeakListVisitor98   static HeapObject WeakNextHolder(CodeT code) {
99     return CodeDataContainerFromCodeT(code);
100   }
101 
WeakNextOffsetv8::internal::WeakListVisitor102   static int WeakNextOffset() { return CodeDataContainer::kNextCodeLinkOffset; }
103 
VisitLiveObjectv8::internal::WeakListVisitor104   static void VisitLiveObject(Heap*, CodeT, WeakObjectRetainer*) {}
105 
VisitPhantomObjectv8::internal::WeakListVisitor106   static void VisitPhantomObject(Heap* heap, CodeT code) {
107     // Even though the code is dying, its code_data_container can still be
108     // alive. Clear the next_code_link slot to avoid a dangling pointer.
109     SetWeakNext(code, ReadOnlyRoots(heap).undefined_value());
110   }
111 };
112 
113 template <>
114 struct WeakListVisitor<Context> {
SetWeakNextv8::internal::WeakListVisitor115   static void SetWeakNext(Context context, Object next) {
116     context.set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
117   }
118 
WeakNextv8::internal::WeakListVisitor119   static Object WeakNext(Context context) {
120     return context.next_context_link();
121   }
122 
WeakNextHolderv8::internal::WeakListVisitor123   static HeapObject WeakNextHolder(Context context) { return context; }
124 
WeakNextOffsetv8::internal::WeakListVisitor125   static int WeakNextOffset() {
126     return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
127   }
128 
VisitLiveObjectv8::internal::WeakListVisitor129   static void VisitLiveObject(Heap* heap, Context context,
130                               WeakObjectRetainer* retainer) {
131     if (heap->gc_state() == Heap::MARK_COMPACT) {
132       // Record the slots of the weak entries in the native context.
133       for (int idx = Context::FIRST_WEAK_SLOT;
134            idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
135         ObjectSlot slot = context.RawField(Context::OffsetOfElementAt(idx));
136         MarkCompactCollector::RecordSlot(context, slot,
137                                          HeapObject::cast(*slot));
138       }
139       // Code objects are always allocated in Code space, we do not have to
140       // visit them during scavenges.
141       DoWeakList<CodeT>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
142       DoWeakList<CodeT>(heap, context, retainer,
143                         Context::DEOPTIMIZED_CODE_LIST);
144     }
145   }
146 
147   template <class T>
DoWeakListv8::internal::WeakListVisitor148   static void DoWeakList(Heap* heap, Context context,
149                          WeakObjectRetainer* retainer, int index) {
150     // Visit the weak list, removing dead intermediate elements.
151     Object list_head = VisitWeakList<T>(heap, context.get(index), retainer);
152 
153     // Update the list head.
154     context.set(index, list_head, UPDATE_WRITE_BARRIER);
155 
156     if (MustRecordSlots(heap)) {
157       // Record the updated slot if necessary.
158       ObjectSlot head_slot = context.RawField(FixedArray::SizeFor(index));
159       heap->mark_compact_collector()->RecordSlot(context, head_slot,
160                                                  HeapObject::cast(list_head));
161     }
162   }
163 
VisitPhantomObjectv8::internal::WeakListVisitor164   static void VisitPhantomObject(Heap* heap, Context context) {
165     ClearWeakList<CodeT>(heap, context.get(Context::OPTIMIZED_CODE_LIST));
166     ClearWeakList<CodeT>(heap, context.get(Context::DEOPTIMIZED_CODE_LIST));
167   }
168 };
169 
170 
171 template <>
172 struct WeakListVisitor<AllocationSite> {
SetWeakNextv8::internal::WeakListVisitor173   static void SetWeakNext(AllocationSite obj, Object next) {
174     obj.set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
175   }
176 
WeakNextv8::internal::WeakListVisitor177   static Object WeakNext(AllocationSite obj) { return obj.weak_next(); }
178 
WeakNextHolderv8::internal::WeakListVisitor179   static HeapObject WeakNextHolder(AllocationSite obj) { return obj; }
180 
WeakNextOffsetv8::internal::WeakListVisitor181   static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }
182 
VisitLiveObjectv8::internal::WeakListVisitor183   static void VisitLiveObject(Heap*, AllocationSite, WeakObjectRetainer*) {}
184 
VisitPhantomObjectv8::internal::WeakListVisitor185   static void VisitPhantomObject(Heap*, AllocationSite) {}
186 };
187 
188 template <>
189 struct WeakListVisitor<JSFinalizationRegistry> {
SetWeakNextv8::internal::WeakListVisitor190   static void SetWeakNext(JSFinalizationRegistry obj, HeapObject next) {
191     obj.set_next_dirty(next, UPDATE_WEAK_WRITE_BARRIER);
192   }
193 
WeakNextv8::internal::WeakListVisitor194   static Object WeakNext(JSFinalizationRegistry obj) {
195     return obj.next_dirty();
196   }
197 
WeakNextHolderv8::internal::WeakListVisitor198   static HeapObject WeakNextHolder(JSFinalizationRegistry obj) { return obj; }
199 
WeakNextOffsetv8::internal::WeakListVisitor200   static int WeakNextOffset() {
201     return JSFinalizationRegistry::kNextDirtyOffset;
202   }
203 
VisitLiveObjectv8::internal::WeakListVisitor204   static void VisitLiveObject(Heap* heap, JSFinalizationRegistry obj,
205                               WeakObjectRetainer*) {
206     heap->set_dirty_js_finalization_registries_list_tail(obj);
207   }
208 
VisitPhantomObjectv8::internal::WeakListVisitor209   static void VisitPhantomObject(Heap*, JSFinalizationRegistry) {}
210 };
211 
212 template Object VisitWeakList<Context>(Heap* heap, Object list,
213                                        WeakObjectRetainer* retainer);
214 
215 template Object VisitWeakList<AllocationSite>(Heap* heap, Object list,
216                                               WeakObjectRetainer* retainer);
217 
218 template Object VisitWeakList<JSFinalizationRegistry>(
219     Heap* heap, Object list, WeakObjectRetainer* retainer);
}  // namespace internal
}  // namespace v8