// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/incremental-marking-job.h"

#include "src/base/platform/mutex.h"
#include "src/base/platform/time.h"
#include "src/execution/isolate.h"
#include "src/execution/vm-state-inl.h"
#include "src/heap/embedder-tracing.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
#include "src/init/v8.h"

namespace v8 {
namespace internal {

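// A cancelable foreground task that advances incremental marking by a small
// step and reschedules itself for as long as marking is in progress.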
class IncrementalMarkingJob::Task : public CancelableTask {
 public:
  static StepResult Step(Heap* heap);

  Task(Isolate* isolate, IncrementalMarkingJob* job,
       EmbedderHeapTracer::EmbedderStackState stack_state, TaskType task_type)
      : CancelableTask(isolate),
        isolate_(isolate),
        job_(job),
        stack_state_(stack_state),
        task_type_(task_type) {}

  // CancelableTask overrides.
  void RunInternal() override;

  Isolate* isolate() const { return isolate_; }

 private:
  Isolate* const isolate_;
  IncrementalMarkingJob* const job_;
  const EmbedderHeapTracer::EmbedderStackState stack_state_;
  const TaskType task_type_;
};

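// Posts the initial marking task. Incremental marking must already have been
// started when this is called.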
void IncrementalMarkingJob::Start(Heap* heap) {
  DCHECK(!heap->incremental_marking()->IsStopped());
  ScheduleTask(heap);
}

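// Posts a task of the given type unless one of that type is already pending,
// the heap is being torn down, or FLAG_incremental_marking_task is disabled.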
void IncrementalMarkingJob::ScheduleTask(Heap* heap, TaskType task_type) {
  base::MutexGuard guard(&mutex_);

  if (!IsTaskPending(task_type) && !heap->IsTearingDown() &&
      FLAG_incremental_marking_task) {
    v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap->isolate());
    SetTaskPending(task_type, true);
    auto taskrunner =
        V8::GetCurrentPlatform()->GetForegroundTaskRunner(isolate);

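    // Non-nestable tasks never run nested inside other V8 work, so the stack
    // can be treated as free of heap pointers; otherwise be conservative and
    // assume it may contain heap pointers.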
    const EmbedderHeapTracer::EmbedderStackState stack_state =
        taskrunner->NonNestableTasksEnabled()
            ? EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers
            : EmbedderHeapTracer::EmbedderStackState::kMayContainHeapPointers;
    auto task =
        std::make_unique<Task>(heap->isolate(), this, stack_state, task_type);
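    // kNormal tasks are posted immediately, kDelayed tasks after
    // kDelayInSeconds; non-nestable variants are preferred where the platform
    // supports them.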
    if (task_type == TaskType::kNormal) {
      scheduled_time_ = heap->MonotonicallyIncreasingTimeInMs();
      if (taskrunner->NonNestableTasksEnabled()) {
        taskrunner->PostNonNestableTask(std::move(task));
      } else {
        taskrunner->PostTask(std::move(task));
      }
    } else {
      if (taskrunner->NonNestableDelayedTasksEnabled()) {
        taskrunner->PostNonNestableDelayedTask(std::move(task),
                                               kDelayInSeconds);
      } else {
        taskrunner->PostDelayedTask(std::move(task), kDelayInSeconds);
      }
    }
  }
}

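// Advances incremental marking with a ~1ms deadline and finalizes marking if
// this step completed it.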
StepResult IncrementalMarkingJob::Task::Step(Heap* heap) {
  const int kIncrementalMarkingDelayMs = 1;
  double deadline =
      heap->MonotonicallyIncreasingTimeInMs() + kIncrementalMarkingDelayMs;
  StepResult result = heap->incremental_marking()->AdvanceWithDeadline(
      deadline, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
      i::StepOrigin::kTask);
  heap->FinalizeIncrementalMarkingIfComplete(
      GarbageCollectionReason::kFinalizeMarkingViaTask);
  return result;
}

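// Runs one scheduled task: records scheduling latency, starts incremental
// marking if needed, performs a marking step, and reschedules while marking
// is still in progress.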
void IncrementalMarkingJob::Task::RunInternal() {
  VMState<GC> state(isolate());
  TRACE_EVENT_CALL_STATS_SCOPED(isolate(), "v8", "V8.Task");

  Heap* heap = isolate()->heap();
  EmbedderStackStateScope scope(
      heap, EmbedderStackStateScope::kImplicitThroughTask, stack_state_);
  if (task_type_ == TaskType::kNormal) {
    heap->tracer()->RecordTimeToIncrementalMarkingTask(
        heap->MonotonicallyIncreasingTimeInMs() - job_->scheduled_time_);
    job_->scheduled_time_ = 0.0;
  }
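  // Start incremental marking if it is not running yet and the heap's limit
  // heuristics report that a marking limit has been reached.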
  IncrementalMarking* incremental_marking = heap->incremental_marking();
  if (incremental_marking->IsStopped()) {
    if (heap->IncrementalMarkingLimitReached() !=
        Heap::IncrementalMarkingLimit::kNoLimit) {
      heap->StartIncrementalMarking(heap->GCFlagsForIncrementalMarking(),
                                    GarbageCollectionReason::kTask,
                                    kGCCallbackScheduleIdleGarbageCollection);
    }
  }

  // Clear this flag after StartIncrementalMarking call to avoid
  // scheduling a new task when starting incremental marking.
  {
    base::MutexGuard guard(&job_->mutex_);
    job_->SetTaskPending(task_type_, false);
  }

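  // Perform a marking step if marking is (still or now) in progress.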
  if (!incremental_marking->IsStopped()) {
    // All objects are initialized at that point.
    heap->new_space()->MarkLabStartInitialized();
    heap->new_lo_space()->ResetPendingObject();
    StepResult step_result = Step(heap);
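    // Reschedule as a regular task if finalization completed or the step
    // still found immediate work; otherwise back off with a delayed task.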
    if (!incremental_marking->IsStopped()) {
      const TaskType task_type =
          incremental_marking->finalize_marking_completed() ||
                  step_result != StepResult::kNoImmediateWork
              ? TaskType::kNormal
              : TaskType::kDelayed;
      job_->ScheduleTask(heap, task_type);
    }
  }
}

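// Returns how long the currently pending kNormal task has been waiting in ms,
// or 0 if no such task is pending.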
double IncrementalMarkingJob::CurrentTimeToTask(Heap* heap) const {
  if (scheduled_time_ == 0.0) return 0.0;

  return heap->MonotonicallyIncreasingTimeInMs() - scheduled_time_;
}

}  // namespace internal
}  // namespace v8