// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SWEEPER_H_
#define V8_HEAP_SWEEPER_H_

#include <map>
#include <vector>

#include "src/base/platform/condition-variable.h"
#include "src/base/platform/semaphore.h"
#include "src/common/globals.h"
#include "src/heap/slot-set.h"
#include "src/tasks/cancelable-task.h"

namespace v8 {
namespace internal {

class InvalidatedSlotsCleanup;
class MajorNonAtomicMarkingState;
class Page;
class PagedSpace;
class Space;

enum FreeSpaceTreatmentMode { IGNORE_FREE_SPACE, ZAP_FREE_SPACE };

class Sweeper {
 public:
  using IterabilityList = std::vector<Page*>;
  using SweepingList = std::vector<Page*>;
  using SweptList = std::vector<Page*>;

  // Pauses the sweeper tasks.
  class V8_NODISCARD PauseScope final {
   public:
    explicit PauseScope(Sweeper* sweeper);
    ~PauseScope();

   private:
    Sweeper* const sweeper_;
  };

  // Temporarily filters old space sweeping lists. Requires the concurrent
  // sweeper to be paused. Allows for pages to be added to the sweeper while
  // in this scope. Note that the original list of sweeping pages is restored
  // after exiting this scope.
  class V8_NODISCARD FilterSweepingPagesScope final {
   public:
    FilterSweepingPagesScope(Sweeper* sweeper, const PauseScope& pause_scope);
    ~FilterSweepingPagesScope();

    template <typename Callback>
    void FilterOldSpaceSweepingPages(Callback callback) {
      if (!sweeping_in_progress_) return;

      SweepingList* sweeper_list =
          &sweeper_->sweeping_list_[GetSweepSpaceIndex(OLD_SPACE)];
      // Iteration here is from most free space to least free space.
      for (auto it = old_space_sweeping_list_.begin();
           it != old_space_sweeping_list_.end(); it++) {
        if (callback(*it)) {
          sweeper_list->push_back(*it);
        }
      }
    }

   private:
    Sweeper* const sweeper_;
    SweepingList old_space_sweeping_list_;
    bool sweeping_in_progress_;
  };
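
  // Usage sketch for the scopes above (illustrative only; actual call sites
  // live in the mark-compact collector and may differ). KeepPage below is a
  // hypothetical bool(Page*) predicate. The concurrent sweeper must be paused
  // before filtering:
  //
  //   Sweeper::PauseScope pause_scope(sweeper);
  //   Sweeper::FilterSweepingPagesScope filter_scope(sweeper, pause_scope);
  //   filter_scope.FilterOldSpaceSweepingPages(
  //       [](Page* page) { return KeepPage(page); });
  //
  // Pages for which the callback returns true are pushed back onto the
  // sweeper's old-space sweeping list; the original list is restored when
  // filter_scope exits.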

  enum FreeListRebuildingMode { REBUILD_FREE_LIST, IGNORE_FREE_LIST };
  enum AddPageMode { REGULAR, READD_TEMPORARY_REMOVED_PAGE };
  enum class SweepingMode { kEagerDuringGC, kLazyOrConcurrent };

  Sweeper(Heap* heap, MajorNonAtomicMarkingState* marking_state);

  bool sweeping_in_progress() const { return sweeping_in_progress_; }

  void TearDown();

  void AddPage(AllocationSpace space, Page* page, AddPageMode mode);

  int ParallelSweepSpace(AllocationSpace identity, SweepingMode sweeping_mode,
                         int required_freed_bytes, int max_pages = 0);
  int ParallelSweepPage(Page* page, AllocationSpace identity,
                        SweepingMode sweeping_mode);

  void EnsurePageIsSwept(Page* page);

  void ScheduleIncrementalSweepingTask();

  int RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
               FreeSpaceTreatmentMode free_space_mode,
               SweepingMode sweeping_mode, const base::MutexGuard& page_guard);

  // After calling this function sweeping is considered to be in progress
  // and the main thread can sweep lazily, but the background sweeper tasks
  // are not running yet.
  void StartSweeping();
  V8_EXPORT_PRIVATE void StartSweeperTasks();
  void EnsureCompleted();
  void DrainSweepingWorklistForSpace(AllocationSpace space);
  bool AreSweeperTasksRunning();
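
  // Typical sweeping-phase sequence (sketch only, given a Sweeper* sweeper;
  // the exact call sites live in the mark-compact collector and may differ):
  //
  //   sweeper->StartSweeping();      // sweeping in progress; the main thread
  //                                  // may now sweep lazily
  //   sweeper->StartSweeperTasks();  // background sweeper tasks start running
  //   ...
  //   sweeper->EnsureCompleted();    // wait for and finish all sweeping work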

  // Support concurrent sweepers from the main thread.
  void SupportConcurrentSweeping();

  Page* GetSweptPageSafe(PagedSpace* space);

  void AddPageForIterability(Page* page);
  void StartIterabilityTasks();
  void EnsureIterabilityCompleted();

 private:
  class IncrementalSweeperTask;
  class IterabilityTask;
  class SweeperJob;

  static const int kNumberOfSweepingSpaces =
      LAST_GROWABLE_PAGED_SPACE - FIRST_GROWABLE_PAGED_SPACE + 1;
  static const int kMaxSweeperTasks = 3;

  template <typename Callback>
  void ForAllSweepingSpaces(Callback callback) const {
    callback(OLD_SPACE);
    callback(CODE_SPACE);
    callback(MAP_SPACE);
  }

  // Helper function for RawSweep. Depending on the FreeListRebuildingMode and
  // FreeSpaceTreatmentMode this function may add the free memory to a free
  // list, make the memory iterable, clear it, and return the free memory to
  // the operating system.
  size_t FreeAndProcessFreedMemory(Address free_start, Address free_end,
                                   Page* page, Space* space,
                                   FreeListRebuildingMode free_list_mode,
                                   FreeSpaceTreatmentMode free_space_mode);

  // Helper function for RawSweep. Handles remembered set entries in the freed
  // memory which require clearing.
  void CleanupRememberedSetEntriesForFreedMemory(
      Address free_start, Address free_end, Page* page, bool record_free_ranges,
      TypedSlotSet::FreeRangesMap* free_ranges_map, SweepingMode sweeping_mode,
      InvalidatedSlotsCleanup* old_to_new_cleanup);

  // Helper function for RawSweep. Clears invalid typed slots in the given free
  // ranges.
  void CleanupInvalidTypedSlotsOfFreeRanges(
      Page* page, const TypedSlotSet::FreeRangesMap& free_ranges_map,
      SweepingMode sweeping_mode);

  // Helper function for RawSweep. Clears the mark bits and ensures consistency
  // of live bytes.
  void ClearMarkBitsAndHandleLivenessStatistics(
      Page* page, size_t live_bytes, FreeListRebuildingMode free_list_mode);

  // Can only be called on the main thread when no tasks are running.
  bool IsDoneSweeping() const {
    bool is_done = true;
    ForAllSweepingSpaces([this, &is_done](AllocationSpace space) {
      if (!sweeping_list_[GetSweepSpaceIndex(space)].empty()) is_done = false;
    });
    return is_done;
  }

  size_t ConcurrentSweepingPageCount();

  // Concurrently sweeps many pages from the given space. Returns true if there
  // are no more pages to sweep in the given space.
  bool ConcurrentSweepSpace(AllocationSpace identity, JobDelegate* delegate);

  // Sweeps incrementally one page from the given space. Returns true if
  // there are no more pages to sweep in the given space.
  bool IncrementalSweepSpace(AllocationSpace identity);

  Page* GetSweepingPageSafe(AllocationSpace space);
  bool TryRemoveSweepingPageSafe(AllocationSpace space, Page* page);

  void PrepareToBeSweptPage(AllocationSpace space, Page* page);

  void MakeIterable(Page* page);

  bool IsValidIterabilitySpace(AllocationSpace space) {
    return space == NEW_SPACE || space == RO_SPACE;
  }

  static bool IsValidSweepingSpace(AllocationSpace space) {
    return space >= FIRST_GROWABLE_PAGED_SPACE &&
           space <= LAST_GROWABLE_PAGED_SPACE;
  }

  static int GetSweepSpaceIndex(AllocationSpace space) {
    DCHECK(IsValidSweepingSpace(space));
    return space - FIRST_GROWABLE_PAGED_SPACE;
  }

  Heap* const heap_;
  MajorNonAtomicMarkingState* marking_state_;
  std::unique_ptr<JobHandle> job_handle_;
  base::Mutex mutex_;
  base::ConditionVariable cv_page_swept_;
  SweptList swept_list_[kNumberOfSweepingSpaces];
  SweepingList sweeping_list_[kNumberOfSweepingSpaces];
  bool incremental_sweeper_pending_;
  // The main thread can finalize sweeping, while the allocation slow path of
  // background threads checks this flag to see whether it could support
  // concurrent sweeping.
  std::atomic<bool> sweeping_in_progress_;

  // Pages that are only made iterable but have their free lists ignored.
  IterabilityList iterability_list_;
  CancelableTaskManager::Id iterability_task_id_;
  base::Semaphore iterability_task_semaphore_;
  bool iterability_in_progress_;
  bool iterability_task_started_;
  bool should_reduce_memory_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SWEEPER_H_