/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "runtime/mem/gc/gc_trigger.h"

#include <algorithm>
#include <atomic>

#include "libpandabase/macros.h"
#include "runtime/include/runtime.h"
#include "runtime/include/runtime_options.h"
#include "runtime/include/panda_vm.h"
#include "utils/logger.h"

namespace panda::mem {

static constexpr size_t PERCENT_100 = 100;

GCTriggerConfig::GCTriggerConfig(const RuntimeOptions &options, panda_file::SourceLang lang)
{
    auto runtime_lang = plugins::LangToRuntimeType(lang);
    gc_trigger_type_ = options.GetGcTriggerType(runtime_lang);
    debug_start_ = options.GetGcDebugTriggerStart(runtime_lang);
    percent_threshold_ = std::min(options.GetGcTriggerPercentThreshold(), PERCENT_100_U32);
    adaptive_multiplier_ = options.GetGcTriggerAdaptiveMultiplier();
    min_extra_heap_size_ = options.GetMinExtraHeapSize();
    max_extra_heap_size_ = options.GetMaxExtraHeapSize();
    max_trigger_percent_ = std::min(options.GetMaxTriggerPercent(), PERCENT_100_U32);
    skip_startup_gc_count_ = options.GetSkipStartupGcCount(runtime_lang);
}
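
// For example, because both percent values above are clamped with std::min against PERCENT_100_U32,
// a configured trigger threshold of 150 percent is stored as 100.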

GCTrigger::~GCTrigger() = default;

GCTriggerHeap::GCTriggerHeap(MemStatsType *mem_stats, HeapSpace *heap_space)
    : GCTrigger(heap_space), mem_stats_(mem_stats)
{
}

GCTriggerHeap::GCTriggerHeap(MemStatsType *mem_stats, HeapSpace *heap_space, size_t min_heap_size,
                             uint8_t percent_threshold, size_t min_extra_size, size_t max_extra_size,
                             uint32_t skip_gc_times)
    : GCTrigger(heap_space), mem_stats_(mem_stats), skip_gc_count_(skip_gc_times)
{
    percent_threshold_ = percent_threshold;
    min_extra_size_ = min_extra_size;
    max_extra_size_ = max_extra_size;
    // If we have min_heap_size < 100, we get false positives in IsGcTriggered, since we divide by 100 first
    ASSERT(min_heap_size >= 100);
    // Atomic with relaxed order reason: data race with target_footprint_ with no synchronization or ordering
    // constraints imposed on other reads or writes
    target_footprint_.store((min_heap_size / PERCENT_100) * percent_threshold_, std::memory_order_relaxed);
    LOG(DEBUG, GC_TRIGGER) << "GCTriggerHeap created, min heap size " << min_heap_size << ", percent threshold "
                           << percent_threshold << ", min_extra_size " << min_extra_size << ", max_extra_size "
                           << max_extra_size;
}
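
// For illustration (hypothetical numbers): with min_heap_size = 8_MB and percent_threshold = 30,
// the initial target footprint is (8_MB / 100) * 30, i.e. roughly 2.4 MB of live heap before the
// first trigger fires.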

void GCTriggerHeap::SetMinTargetFootprint(size_t target_size)
{
    LOG(DEBUG, GC_TRIGGER) << "SetTempTargetFootprint target_footprint = " << target_size;
    min_target_footprint_ = target_size;
    // Atomic with relaxed order reason: data race with target_footprint_ with no synchronization or ordering
    // constraints imposed on other reads or writes
    target_footprint_.store(target_size, std::memory_order_relaxed);
}

void GCTriggerHeap::RestoreMinTargetFootprint()
{
    min_target_footprint_ = DEFAULT_MIN_TARGET_FOOTPRINT;
}

void GCTriggerHeap::ComputeNewTargetFootprint(const GCTask &task, size_t heap_size_before_gc, size_t heap_size)
{
    GC *gc = Thread::GetCurrent()->GetVM()->GetGC();
    if (gc->IsGenerational() && task.reason_ == GCTaskCause::YOUNG_GC_CAUSE) {
        // we don't want to update heap-trigger on young-gc
        return;
    }

    size_t target = this->ComputeTarget(heap_size_before_gc, heap_size);

    // Atomic with relaxed order reason: data race with target_footprint_ with no synchronization or ordering
    // constraints imposed on other reads or writes
    target_footprint_.store(target, std::memory_order_relaxed);

    LOG(DEBUG, GC_TRIGGER) << "ComputeNewTargetFootprint target_footprint = " << target;
}

size_t GCTriggerHeap::ComputeTarget(size_t heap_size_before_gc, size_t heap_size)
{
    // Note: divide by 100 first to avoid overflow
    size_t delta = (heap_size / PERCENT_100) * percent_threshold_;

    // The heap has grown compared with the previous GC
    if (heap_size > heap_size_before_gc) {
        delta = std::min(delta, max_extra_size_);
    } else {
        // If the heap was squeezed, e.g. from 200MB to 100MB, we want to set the target to 150MB,
        // not just 100MB * percent_threshold_
        delta = std::max(delta, (heap_size_before_gc - heap_size) / 2);
    }
    return heap_size + std::max(delta, min_extra_size_);
}
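
// Worked example (illustrative numbers): with percent_threshold_ = 30, min_extra_size_ = 8_MB and
// max_extra_size_ = 16_MB:
//  - growth: heap_size_before_gc = 60_MB, heap_size = 80_MB -> delta = 24_MB, clamped to 16_MB,
//            target = 96_MB;
//  - shrink: heap_size_before_gc = 200_MB, heap_size = 100_MB -> delta = max(30_MB, 50_MB) = 50_MB,
//            target = 150_MB (matching the comment above).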

bool GCTriggerHeap::IsGcTriggered()
{
    if (skip_gc_count_ > 0) {
        skip_gc_count_--;
        return false;
    }
    size_t bytes_in_heap = mem_stats_->GetFootprintHeap();
    // Atomic with relaxed order reason: data race with target_footprint_ with no synchronization or ordering
    // constraints imposed on other reads or writes
    if (UNLIKELY(bytes_in_heap >= target_footprint_.load(std::memory_order_relaxed))) {
        LOG(DEBUG, GC_TRIGGER) << "GCTriggerHeap triggered";
        auto gc = Runtime::GetCurrent()->GetPandaVM()->GetGC();
        ASSERT(gc != nullptr);
        gc->PendingGC();
        return true;
    }
    return false;
}

GCAdaptiveTriggerHeap::GCAdaptiveTriggerHeap(MemStatsType *mem_stats, HeapSpace *heap_space, size_t min_heap_size,
                                             uint8_t percent_threshold, uint32_t adaptive_multiplier,
                                             size_t min_extra_size, size_t max_extra_size, uint32_t skip_gc_times)
    : GCTriggerHeap(mem_stats, heap_space, min_heap_size, percent_threshold, min_extra_size, max_extra_size,
                    skip_gc_times),
      adaptive_multiplier_(adaptive_multiplier)
{
    // Atomic with relaxed order reason: data race with target_footprint_ with no synchronization or ordering
    // constraints imposed on other reads or writes
    recent_target_thresholds_.push_back(target_footprint_.load(std::memory_order_relaxed));
}

size_t GCAdaptiveTriggerHeap::ComputeTarget(size_t heap_size_before_gc, size_t heap_size)
{
    auto delta = static_cast<size_t>(static_cast<double>(heap_size) / PERCENT_100_D * percent_threshold_);

    const auto [min_threshold, max_threshold] =
        std::minmax_element(recent_target_thresholds_.begin(), recent_target_thresholds_.end());
    size_t window = *max_threshold - *min_threshold;

    // If the recent thresholds are localized in a "small" window, we need to move out of it to avoid
    // triggering too often
    if (window <= max_extra_size_) {
        delta = std::max(delta, adaptive_multiplier_ * max_extra_size_);
        delta = std::min(delta, heap_size);
    } else if (heap_size > heap_size_before_gc) { // the heap has grown compared with the previous GC
        delta = std::min(delta, max_extra_size_);
    } else {
        // If the heap was squeezed, e.g. from 200MB to 100MB, we want to set the target to 150MB,
        // not just 100MB * percent_threshold_
        delta = std::max(delta, (heap_size_before_gc - heap_size) / 2);
    }
    delta = std::max(delta, min_extra_size_);
    size_t target = heap_size + delta;

    recent_target_thresholds_.push_back(target);

    return target;
}
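
// Worked example (illustrative numbers): with max_extra_size_ = 16_MB and adaptive_multiplier_ = 3,
// recent targets of {100_MB, 104_MB} give a window of 4_MB <= 16_MB, so delta is raised to at least
// 3 * 16_MB = 48_MB (capped by heap_size), pushing the next target well outside the recent range.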

GCTriggerType GetTriggerType(std::string_view gc_trigger_type)
{
    auto trigger_type = GCTriggerType::INVALID_TRIGGER;
    if (gc_trigger_type == "heap-trigger-test") {
        trigger_type = GCTriggerType::HEAP_TRIGGER_TEST;
    } else if (gc_trigger_type == "heap-trigger") {
        trigger_type = GCTriggerType::HEAP_TRIGGER;
    } else if (gc_trigger_type == "adaptive-heap-trigger") {
        trigger_type = GCTriggerType::ADAPTIVE_HEAP_TRIGGER;
    } else if (gc_trigger_type == "trigger-heap-occupancy") {
        trigger_type = GCTriggerType::TRIGGER_HEAP_OCCUPANCY;
    } else if (gc_trigger_type == "debug") {
        trigger_type = GCTriggerType::DEBUG;
    } else if (gc_trigger_type == "no-gc-for-start-up") {
        trigger_type = GCTriggerType::NO_GC_FOR_START_UP;
    } else if (gc_trigger_type == "debug-never") {
        trigger_type = GCTriggerType::DEBUG_NEVER;
    }
    return trigger_type;
}
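
// Usage sketch: GetTriggerType("adaptive-heap-trigger") yields GCTriggerType::ADAPTIVE_HEAP_TRIGGER,
// while any unrecognized string (e.g. "my-trigger") falls through to GCTriggerType::INVALID_TRIGGER,
// which CreateGCTrigger below reports as a fatal error.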

GCTrigger *CreateGCTrigger(MemStatsType *mem_stats, HeapSpace *heap_space, const GCTriggerConfig &config,
                           InternalAllocatorPtr allocator)
{
    uint32_t skip_gc_times = config.GetSkipStartupGcCount();

    constexpr size_t DEFAULT_HEAP_SIZE = 8_MB;
    auto trigger_type = GetTriggerType(config.GetGCTriggerType());

    GCTrigger *ret {nullptr};
    switch (trigger_type) { // NOLINT(hicpp-multiway-paths-covered)
        case GCTriggerType::HEAP_TRIGGER_TEST:
            // TODO(dtrubenkov): replace with permanent allocator when we get it
            ret = allocator->New<GCTriggerHeap>(mem_stats, heap_space);
            break;
        case GCTriggerType::HEAP_TRIGGER:
            ret = allocator->New<GCTriggerHeap>(mem_stats, heap_space, DEFAULT_HEAP_SIZE, config.GetPercentThreshold(),
                                                config.GetMinExtraHeapSize(), config.GetMaxExtraHeapSize());
            break;
        case GCTriggerType::ADAPTIVE_HEAP_TRIGGER:
            ret = allocator->New<GCAdaptiveTriggerHeap>(mem_stats, heap_space, DEFAULT_HEAP_SIZE,
                                                        config.GetPercentThreshold(), config.GetAdaptiveMultiplier(),
                                                        config.GetMinExtraHeapSize(), config.GetMaxExtraHeapSize());
            break;
        case GCTriggerType::NO_GC_FOR_START_UP:
            ret = allocator->New<GCTriggerHeap>(mem_stats, heap_space, DEFAULT_HEAP_SIZE, config.GetPercentThreshold(),
                                                config.GetMinExtraHeapSize(), config.GetMaxExtraHeapSize(),
                                                skip_gc_times);
            break;
        case GCTriggerType::TRIGGER_HEAP_OCCUPANCY:
            ret = allocator->New<GCTriggerHeapOccupancy>(heap_space, config.GetMaxTriggerPercent());
            break;
        case GCTriggerType::DEBUG:
            ret = allocator->New<GCTriggerDebug>(config.GetDebugStart(), heap_space);
            break;
        case GCTriggerType::DEBUG_NEVER:
            ret = allocator->New<GCNeverTrigger>(heap_space);
            break;
        default:
            LOG(FATAL, GC) << "Wrong GCTrigger type";
            break;
    }
    return ret;
}
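
// Usage sketch (hypothetical call site, assuming `options` is the RuntimeOptions instance and `lang`
// the source language):
//   GCTriggerConfig config(options, lang);
//   GCTrigger *trigger = CreateGCTrigger(mem_stats, heap_space, config, allocator);
// Note that an unrecognized trigger type reaches the default branch and aborts via LOG(FATAL, GC)
// rather than returning nullptr.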

void GCTriggerHeap::GCStarted([[maybe_unused]] size_t heap_size)
{
    heap_space_->SetIsWorkGC(true);
}

void GCTriggerHeap::GCFinished(const GCTask &task, size_t heap_size_before_gc, size_t heap_size)
{
    this->ComputeNewTargetFootprint(task, heap_size_before_gc, heap_size);
    heap_space_->ComputeNewSize();
}

GCTriggerDebug::GCTriggerDebug(uint64_t debug_start, HeapSpace *heap_space)
    : GCTrigger(heap_space), debug_start_(debug_start)
{
    LOG(DEBUG, GC_TRIGGER) << "GCTriggerDebug created";
}

bool GCTriggerDebug::IsGcTriggered()
{
    bool ret = false;
    static std::atomic<uint64_t> counter = 0;
    LOG(DEBUG, GC_TRIGGER) << "GCTriggerDebug counter " << counter;
    if (counter >= debug_start_) {
        LOG(DEBUG, GC_TRIGGER) << "GCTriggerDebug triggered";
        ret = true;
    }
    counter++;
    return ret;
}
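
// For example, with debug_start_ = 3 the first three calls (counter 0, 1, 2) return false and every
// subsequent call returns true, i.e. the trigger fires on every check once the start count is reached.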

void GCTriggerDebug::GCStarted([[maybe_unused]] size_t heap_size)
{
    heap_space_->SetIsWorkGC(true);
}

void GCTriggerDebug::GCFinished([[maybe_unused]] const GCTask &task, [[maybe_unused]] size_t heap_size_before_gc,
                                [[maybe_unused]] size_t heap_size)
{
    heap_space_->ComputeNewSize();
}

GCTriggerHeapOccupancy::GCTriggerHeapOccupancy(HeapSpace *heap_space, uint32_t max_trigger_percent)
    : GCTrigger(heap_space), max_trigger_percent_(max_trigger_percent / PERCENT_100_D)
{
    LOG(DEBUG, GC_TRIGGER) << "GCTriggerHeapOccupancy created";
}

void GCTriggerHeapOccupancy::GCStarted([[maybe_unused]] size_t heap_size)
{
    heap_space_->SetIsWorkGC(true);
}

void GCTriggerHeapOccupancy::GCFinished([[maybe_unused]] const GCTask &task,
                                        [[maybe_unused]] size_t heap_size_before_gc, [[maybe_unused]] size_t heap_size)
{
    heap_space_->ComputeNewSize();
}

bool GCTriggerHeapOccupancy::IsGcTriggered()
{
    size_t current_heap_size = heap_space_->GetHeapSize();
    size_t min_heap_size = MemConfig::GetInitialHeapSizeLimit();
    size_t max_heap_size = MemConfig::GetHeapSizeLimit();
    size_t threshold = std::min(min_heap_size, static_cast<size_t>(max_trigger_percent_ * max_heap_size));
    if (current_heap_size > threshold) {
        LOG(DEBUG, GC_TRIGGER) << "GCTriggerHeapOccupancy triggered: current heap size = " << current_heap_size
                               << ", threshold = " << threshold;
        return true;
    }
    return false;
}
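
// For illustration (hypothetical limits): with an initial heap size limit of 64_MB, a heap size limit of
// 256_MB and max_trigger_percent = 90, the threshold is min(64_MB, 0.9 * 256_MB) = 64_MB, so the trigger
// fires once the occupied heap exceeds 64_MB.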

} // namespace panda::mem