// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SCAVENGER_H_
#define V8_HEAP_SCAVENGER_H_

#include "src/base/platform/condition-variable.h"
#include "src/heap/base/worklist.h"
#include "src/heap/evacuation-allocator.h"
#include "src/heap/index-generator.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/parallel-work-item.h"
#include "src/heap/slot-set.h"

namespace v8 {
namespace internal {

class RootScavengeVisitor;
class Scavenger;

enum class CopyAndForwardResult {
  SUCCESS_YOUNG_GENERATION,
  SUCCESS_OLD_GENERATION,
  FAILURE
};

using ObjectAndSize = std::pair<HeapObject, int>;
using SurvivingNewLargeObjectsMap =
    std::unordered_map<HeapObject, Map, Object::Hasher>;
using SurvivingNewLargeObjectMapEntry = std::pair<HeapObject, Map>;

constexpr int kEphemeronTableListSegmentSize = 128;
using EphemeronTableList =
    ::heap::base::Worklist<EphemeronHashTable, kEphemeronTableListSegmentSize>;

class ScavengerCollector;

class Scavenger {
 public:
  struct PromotionListEntry {
    HeapObject heap_object;
    Map map;
    int size;
  };

  class PromotionList {
   public:
    static constexpr size_t kRegularObjectPromotionListSegmentSize = 256;
    static constexpr size_t kLargeObjectPromotionListSegmentSize = 4;

    using RegularObjectPromotionList =
        ::heap::base::Worklist<ObjectAndSize,
                               kRegularObjectPromotionListSegmentSize>;
    using LargeObjectPromotionList =
        ::heap::base::Worklist<PromotionListEntry,
                               kLargeObjectPromotionListSegmentSize>;

    class Local {
     public:
      explicit Local(PromotionList* promotion_list);

      inline void PushRegularObject(HeapObject object, int size);
      inline void PushLargeObject(HeapObject object, Map map, int size);
      inline size_t LocalPushSegmentSize() const;
      inline bool Pop(struct PromotionListEntry* entry);
      inline bool IsGlobalPoolEmpty() const;
      inline bool ShouldEagerlyProcessPromotionList() const;
      inline void Publish();

     private:
      RegularObjectPromotionList::Local regular_object_promotion_list_local_;
      LargeObjectPromotionList::Local large_object_promotion_list_local_;
    };

    inline bool IsEmpty() const;
    inline size_t Size() const;

   private:
    RegularObjectPromotionList regular_object_promotion_list_;
    LargeObjectPromotionList large_object_promotion_list_;
  };

  static const int kCopiedListSegmentSize = 256;

  using CopiedList =
      ::heap::base::Worklist<ObjectAndSize, kCopiedListSegmentSize>;
  using EmptyChunksList = ::heap::base::Worklist<MemoryChunk*, 64>;

  Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
            EmptyChunksList* empty_chunks, CopiedList* copied_list,
            PromotionList* promotion_list,
            EphemeronTableList* ephemeron_table_list, int task_id);

  // Entry point for scavenging an old generation page. For scavenging single
  // objects see RootScavengeVisitor and ScavengeVisitor below.
  void ScavengePage(MemoryChunk* page);

  // Processes remaining work (=objects) after single objects have been
  // manually scavenged using ScavengeObject or CheckAndScavengeObject.
  void Process(JobDelegate* delegate = nullptr);
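
  // A minimal usage sketch (illustrative only; |collector|, |heap|, and
  // |page| are hypothetical and are normally supplied by the
  // ScavengerCollector below, which owns the task setup):
  //
  //   Scavenger::EmptyChunksList empty_chunks;
  //   Scavenger::CopiedList copied_list;
  //   Scavenger::PromotionList promotion_list;
  //   EphemeronTableList ephemeron_table_list;
  //   Scavenger scavenger(collector, heap, /*is_logging=*/false,
  //                       &empty_chunks, &copied_list, &promotion_list,
  //                       &ephemeron_table_list, /*task_id=*/0);
  //   scavenger.ScavengePage(page);  // Evacuate objects referenced from |page|.
  //   scavenger.Process();           // Drain the copied/promotion worklists.
  //   scavenger.Finalize();          // Main thread only.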

  // Finalize the Scavenger. Needs to be called from the main thread.
  void Finalize();
  void Publish();

  void AddEphemeronHashTable(EphemeronHashTable table);

  size_t bytes_copied() const { return copied_size_; }
  size_t bytes_promoted() const { return promoted_size_; }

 private:
  enum PromotionHeapChoice { kPromoteIntoLocalHeap, kPromoteIntoSharedHeap };

  // Number of objects to process before interrupting for potentially waking
  // up other tasks.
  static const int kInterruptThreshold = 128;
  static const int kInitialLocalPretenuringFeedbackCapacity = 256;

  inline Heap* heap() { return heap_; }

  inline void PageMemoryFence(MaybeObject object);

  void AddPageToSweeperIfNecessary(MemoryChunk* page);

  // Potentially scavenges an object referenced from |slot| if it is
  // indeed a HeapObject and resides in from space.
  template <typename TSlot>
  inline SlotCallbackResult CheckAndScavengeObject(Heap* heap, TSlot slot);

  // Scavenges an object |object| referenced from slot |p|. |object| is
  // required to be in from space.
  template <typename THeapObjectSlot>
  inline SlotCallbackResult ScavengeObject(THeapObjectSlot p,
                                           HeapObject object);

  // Copies |source| to |target| and sets the forwarding pointer in |source|.
  V8_INLINE bool MigrateObject(Map map, HeapObject source, HeapObject target,
                               int size,
                               PromotionHeapChoice promotion_heap_choice);

  V8_INLINE SlotCallbackResult
  RememberedSetEntryNeeded(CopyAndForwardResult result);

  template <typename THeapObjectSlot>
  V8_INLINE CopyAndForwardResult
  SemiSpaceCopyObject(Map map, THeapObjectSlot slot, HeapObject object,
                      int object_size, ObjectFields object_fields);

  template <typename THeapObjectSlot,
            PromotionHeapChoice promotion_heap_choice = kPromoteIntoLocalHeap>
  V8_INLINE CopyAndForwardResult PromoteObject(Map map, THeapObjectSlot slot,
                                               HeapObject object,
                                               int object_size,
                                               ObjectFields object_fields);

  template <typename THeapObjectSlot>
  V8_INLINE SlotCallbackResult EvacuateObject(THeapObjectSlot slot, Map map,
                                              HeapObject source);

  V8_INLINE bool HandleLargeObject(Map map, HeapObject object, int object_size,
                                   ObjectFields object_fields);
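
  // Rough overview of the evacuation helpers declared below, derived from the
  // declarations in this header rather than from the implementation:
  // SemiSpaceCopyObject() attempts to copy a live young object into to-space,
  // PromoteObject() moves it into the old generation (or the shared heap,
  // depending on |promotion_heap_choice|), and MigrateObject() copies the
  // contents and installs the forwarding pointer in |source| so that other
  // slots still pointing at |source| can be updated to |target|.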

  // Different cases for object evacuation.
  template <typename THeapObjectSlot,
            PromotionHeapChoice promotion_heap_choice = kPromoteIntoLocalHeap>
  V8_INLINE SlotCallbackResult
  EvacuateObjectDefault(Map map, THeapObjectSlot slot, HeapObject object,
                        int object_size, ObjectFields object_fields);

  template <typename THeapObjectSlot>
  inline SlotCallbackResult EvacuateThinString(Map map, THeapObjectSlot slot,
                                               ThinString object,
                                               int object_size);

  template <typename THeapObjectSlot>
  inline SlotCallbackResult EvacuateShortcutCandidate(Map map,
                                                      THeapObjectSlot slot,
                                                      ConsString object,
                                                      int object_size);

  template <typename THeapObjectSlot>
  inline SlotCallbackResult EvacuateInPlaceInternalizableString(
      Map map, THeapObjectSlot slot, String string, int object_size,
      ObjectFields object_fields);

  void IterateAndScavengePromotedObject(HeapObject target, Map map, int size);
  void RememberPromotedEphemeron(EphemeronHashTable table, int index);

  ScavengerCollector* const collector_;
  Heap* const heap_;
  EmptyChunksList::Local empty_chunks_local_;
  PromotionList::Local promotion_list_local_;
  CopiedList::Local copied_list_local_;
  EphemeronTableList::Local ephemeron_table_list_local_;
  Heap::PretenuringFeedbackMap local_pretenuring_feedback_;
  size_t copied_size_;
  size_t promoted_size_;
  EvacuationAllocator allocator_;
  std::unique_ptr<ConcurrentAllocator> shared_old_allocator_;
  SurvivingNewLargeObjectsMap surviving_new_large_objects_;

  EphemeronRememberedSet ephemeron_remembered_set_;
  const bool is_logging_;
  const bool is_incremental_marking_;
  const bool is_compacting_;
  const bool is_compacting_including_map_space_;
  const bool shared_string_table_;

  friend class IterateAndScavengePromotedObjectsVisitor;
  friend class RootScavengeVisitor;
  friend class ScavengeVisitor;
};

// Helper class for turning the scavenger into an object visitor that is also
// filtering out non-HeapObjects and objects which do not reside in new space.
class RootScavengeVisitor final : public RootVisitor {
 public:
  explicit RootScavengeVisitor(Scavenger* scavenger);

  void VisitRootPointer(Root root, const char* description,
                        FullObjectSlot p) final;
  void VisitRootPointers(Root root, const char* description,
                         FullObjectSlot start, FullObjectSlot end) final;

 private:
  void ScavengePointer(FullObjectSlot p);

  Scavenger* const scavenger_;
};

// Object visitor that scavenges the heap object slots of visited objects.
class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> {
 public:
  explicit ScavengeVisitor(Scavenger* scavenger);

  V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
                               ObjectSlot end) final;

  V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
                               MaybeObjectSlot end) final;
  V8_INLINE void VisitCodePointer(HeapObject host, CodeObjectSlot slot) final;

  V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final;
  V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final;
  V8_INLINE int VisitEphemeronHashTable(Map map, EphemeronHashTable object);
  V8_INLINE int VisitJSArrayBuffer(Map map, JSArrayBuffer object);

 private:
  template <typename TSlot>
  V8_INLINE void VisitHeapObjectImpl(TSlot slot, HeapObject heap_object);

  template <typename TSlot>
  V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end);

  Scavenger* const scavenger_;
};

class ScavengerCollector {
 public:
  static const int kMaxScavengerTasks = 8;
  static const int kMainThreadId = 0;

  explicit ScavengerCollector(Heap* heap);

  void CollectGarbage();

 private:
  class JobTask : public v8::JobTask {
   public:
    explicit JobTask(
        ScavengerCollector* outer,
        std::vector<std::unique_ptr<Scavenger>>* scavengers,
        std::vector<std::pair<ParallelWorkItem, MemoryChunk*>> memory_chunks,
        Scavenger::CopiedList* copied_list,
        Scavenger::PromotionList* promotion_list);

    void Run(JobDelegate* delegate) override;
    size_t GetMaxConcurrency(size_t worker_count) const override;

   private:
    void ProcessItems(JobDelegate* delegate, Scavenger* scavenger);
    void ConcurrentScavengePages(Scavenger* scavenger);

    ScavengerCollector* outer_;

    std::vector<std::unique_ptr<Scavenger>>* scavengers_;
    std::vector<std::pair<ParallelWorkItem, MemoryChunk*>> memory_chunks_;
    std::atomic<size_t> remaining_memory_chunks_{0};
    IndexGenerator generator_;

    Scavenger::CopiedList* copied_list_;
    Scavenger::PromotionList* promotion_list_;
  };

  void MergeSurvivingNewLargeObjects(
      const SurvivingNewLargeObjectsMap& objects);

  int NumberOfScavengeTasks();

  void ProcessWeakReferences(EphemeronTableList* ephemeron_table_list);
  void ClearYoungEphemerons(EphemeronTableList* ephemeron_table_list);
  void ClearOldEphemerons();
  void HandleSurvivingNewLargeObjects();

  void SweepArrayBufferExtensions();

  void IterateStackAndScavenge(
      RootScavengeVisitor* root_scavenge_visitor,
      std::vector<std::unique_ptr<Scavenger>>* scavengers, int main_thread_id);

  Isolate* const isolate_;
  Heap* const heap_;
  SurvivingNewLargeObjectsMap surviving_new_large_objects_;

  friend class Scavenger;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SCAVENGER_H_