// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/baseline/baseline-batch-compiler.h"

// TODO(v8:11421): Remove #if once baseline compiler is ported to other
// architectures.
#include "src/flags/flags.h"
#if ENABLE_SPARKPLUG

#include <algorithm>

#include "src/baseline/baseline-compiler.h"
#include "src/codegen/compiler.h"
#include "src/execution/isolate.h"
#include "src/handles/global-handles-inl.h"
#include "src/heap/factory-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/local-heap-inl.h"
#include "src/heap/parked-scope.h"
#include "src/objects/fixed-array-inl.h"
#include "src/objects/js-function-inl.h"
#include "src/utils/locked-queue-inl.h"

namespace v8 {
namespace internal {
namespace baseline {

static bool CanCompileWithConcurrentBaseline(SharedFunctionInfo shared,
                                             Isolate* isolate) {
  return !shared.HasBaselineCode() && CanCompileWithBaseline(isolate, shared);
}

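// A single unit of concurrent baseline compilation. The task keeps the
// SharedFunctionInfo and its BytecodeArray alive via persistent handles:
// Compile() runs on a background thread, Install() publishes the result on
// the main thread.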
class BaselineCompilerTask {
 public:
  BaselineCompilerTask(Isolate* isolate, PersistentHandles* handles,
                       SharedFunctionInfo sfi)
      : shared_function_info_(handles->NewHandle(sfi)),
        bytecode_(handles->NewHandle(sfi.GetBytecodeArray(isolate))) {
    DCHECK(sfi.is_compiled());
  }

  BaselineCompilerTask(const BaselineCompilerTask&) V8_NOEXCEPT = delete;
  BaselineCompilerTask(BaselineCompilerTask&&) V8_NOEXCEPT = default;

  // Executed in the background thread.
  void Compile(LocalIsolate* local_isolate) {
    BaselineCompiler compiler(local_isolate, shared_function_info_, bytecode_);
    compiler.GenerateCode();
    maybe_code_ = local_isolate->heap()->NewPersistentMaybeHandle(
        compiler.Build(local_isolate));
    Handle<Code> code;
    if (maybe_code_.ToHandle(&code)) {
      local_isolate->heap()->RegisterCodeObject(code);
    }
  }

  // Executed in the main thread.
  void Install(Isolate* isolate) {
    Handle<Code> code;
    if (!maybe_code_.ToHandle(&code)) return;
    if (FLAG_print_code) {
      code->Print();
    }
    // Don't install the code if the bytecode has been flushed or the
    // function already has baseline code installed.
    if (!CanCompileWithConcurrentBaseline(*shared_function_info_, isolate)) {
      return;
    }
    shared_function_info_->set_baseline_code(ToCodeT(*code), kReleaseStore);
    if (V8_LIKELY(FLAG_use_osr)) {
      shared_function_info_->GetBytecodeArray(isolate)
          .RequestOsrAtNextOpportunity();
    }
    if (FLAG_trace_baseline_concurrent_compilation) {
      CodeTracer::Scope scope(isolate->GetCodeTracer());
      std::stringstream ss;
      ss << "[Concurrent Sparkplug Off Thread] Function ";
      shared_function_info_->ShortPrint(ss);
      ss << " installed\n";
      OFStream os(scope.file());
      os << ss.str();
    }
  }

 private:
  Handle<SharedFunctionInfo> shared_function_info_;
  Handle<BytecodeArray> bytecode_;
  MaybeHandle<Code> maybe_code_;
};

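// A batch of compilation tasks. The constructor snapshots up to batch_size
// entries from the weak task queue into persistent handles, skipping any
// function whose weak reference died or whose bytecode was flushed in the
// meantime.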
class BaselineBatchCompilerJob {
 public:
  BaselineBatchCompilerJob(Isolate* isolate, Handle<WeakFixedArray> task_queue,
                           int batch_size) {
    handles_ = isolate->NewPersistentHandles();
    tasks_.reserve(batch_size);
    for (int i = 0; i < batch_size; i++) {
      MaybeObject maybe_sfi = task_queue->Get(i);
      // TODO(victorgomes): Do I need to clear the value?
      task_queue->Set(i, HeapObjectReference::ClearedValue(isolate));
      HeapObject obj;
      // Skip functions where the weak reference is no longer valid.
      if (!maybe_sfi.GetHeapObjectIfWeak(&obj)) continue;
      // Skip functions where the bytecode has been flushed.
      SharedFunctionInfo shared = SharedFunctionInfo::cast(obj);
      if (!CanCompileWithConcurrentBaseline(shared, isolate)) continue;
      tasks_.emplace_back(isolate, handles_.get(), shared);
    }
    if (FLAG_trace_baseline_concurrent_compilation) {
      CodeTracer::Scope scope(isolate->GetCodeTracer());
      PrintF(scope.file(), "[Concurrent Sparkplug] compiling %zu functions\n",
             tasks_.size());
    }
  }

  // Executed in the background thread.
  void Compile(LocalIsolate* local_isolate) {
    local_isolate->heap()->AttachPersistentHandles(std::move(handles_));
    for (auto& task : tasks_) {
      task.Compile(local_isolate);
    }
    // Get the handles back since we'll need them to install the code later.
    handles_ = local_isolate->heap()->DetachPersistentHandles();
  }

  // Executed in the main thread.
  void Install(Isolate* isolate) {
    for (auto& task : tasks_) {
      task.Install(isolate);
    }
  }

 private:
  std::vector<BaselineCompilerTask> tasks_;
  std::unique_ptr<PersistentHandles> handles_;
};

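// Owns the background job that processes compilation batches. Batches travel
// from the main thread through incoming_queue_ to the workers and back
// through outgoing_queue_ for installation on the main thread.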
class ConcurrentBaselineCompiler {
 public:
  class JobDispatcher : public v8::JobTask {
   public:
    JobDispatcher(
        Isolate* isolate,
        LockedQueue<std::unique_ptr<BaselineBatchCompilerJob>>* incoming_queue,
        LockedQueue<std::unique_ptr<BaselineBatchCompilerJob>>* outgoing_queue)
        : isolate_(isolate),
          incoming_queue_(incoming_queue),
          outgoing_queue_(outgoing_queue) {}

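    // Drains the incoming queue: each dequeued batch is compiled against a
    // background LocalIsolate and then handed to the outgoing queue, ready
    // for main-thread installation.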
    void Run(JobDelegate* delegate) override {
      LocalIsolate local_isolate(isolate_, ThreadKind::kBackground);
      UnparkedScope unparked_scope(&local_isolate);
      LocalHandleScope handle_scope(&local_isolate);

      // Since we're going to compile an entire batch, this guarantees that
      // we only switch the memory chunks back to RX at the end.
      CodePageCollectionMemoryModificationScope batch_alloc(isolate_->heap());

      while (!incoming_queue_->IsEmpty() && !delegate->ShouldYield()) {
        std::unique_ptr<BaselineBatchCompilerJob> job;
        if (!incoming_queue_->Dequeue(&job)) break;
        DCHECK_NOT_NULL(job);
        job->Compile(&local_isolate);
        outgoing_queue_->Enqueue(std::move(job));
      }
      isolate_->stack_guard()->RequestInstallBaselineCode();
    }

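    // One worker per pending batch, optionally capped by
    // --concurrent-sparkplug-max-threads (0 means uncapped).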
    size_t GetMaxConcurrency(size_t worker_count) const override {
      size_t max_threads = FLAG_concurrent_sparkplug_max_threads;
      if (max_threads > 0) {
        return std::min(max_threads, incoming_queue_->size());
      }
      return incoming_queue_->size();
    }

   private:
    Isolate* isolate_;
    LockedQueue<std::unique_ptr<BaselineBatchCompilerJob>>* incoming_queue_;
    LockedQueue<std::unique_ptr<BaselineBatchCompilerJob>>* outgoing_queue_;
  };

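  // The background job is only posted when --concurrent-sparkplug is
  // enabled; otherwise job_handle_ stays null and CompileBatch() must not be
  // reached.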
  explicit ConcurrentBaselineCompiler(Isolate* isolate) : isolate_(isolate) {
    if (FLAG_concurrent_sparkplug) {
      job_handle_ = V8::GetCurrentPlatform()->PostJob(
          TaskPriority::kUserVisible,
          std::make_unique<JobDispatcher>(isolate_, &incoming_queue_,
                                          &outgoing_queue_));
    }
  }

  ~ConcurrentBaselineCompiler() {
    if (job_handle_ && job_handle_->IsValid()) {
      // Wait for the job handle to complete, so that we know the queue
      // pointers are safe.
      job_handle_->Cancel();
    }
  }

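  // Called on the main thread: wraps the current contents of the weak task
  // queue into a compiler job, hands it to the background workers and bumps
  // the job's concurrency.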
  void CompileBatch(Handle<WeakFixedArray> task_queue, int batch_size) {
    DCHECK(FLAG_concurrent_sparkplug);
    RCS_SCOPE(isolate_, RuntimeCallCounterId::kCompileBaseline);
    incoming_queue_.Enqueue(std::make_unique<BaselineBatchCompilerJob>(
        isolate_, task_queue, batch_size));
    job_handle_->NotifyConcurrencyIncrease();
  }

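  // Called on the main thread once the stack-guard interrupt requested in
  // Run() is serviced; publishes the code of every finished batch.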
  void InstallBatch() {
    while (!outgoing_queue_.IsEmpty()) {
      std::unique_ptr<BaselineBatchCompilerJob> job;
      outgoing_queue_.Dequeue(&job);
      job->Install(isolate_);
    }
  }

 private:
  Isolate* isolate_;
  std::unique_ptr<JobHandle> job_handle_ = nullptr;
  LockedQueue<std::unique_ptr<BaselineBatchCompilerJob>> incoming_queue_;
  LockedQueue<std::unique_ptr<BaselineBatchCompilerJob>> outgoing_queue_;
};

BaselineBatchCompiler::BaselineBatchCompiler(Isolate* isolate)
    : isolate_(isolate),
      compilation_queue_(Handle<WeakFixedArray>::null()),
      last_index_(0),
      estimated_instruction_size_(0),
      enabled_(true) {
  if (FLAG_concurrent_sparkplug) {
    concurrent_compiler_ =
        std::make_unique<ConcurrentBaselineCompiler>(isolate_);
  }
}

BaselineBatchCompiler::~BaselineBatchCompiler() {
  if (!compilation_queue_.is_null()) {
    GlobalHandles::Destroy(compilation_queue_.location());
    compilation_queue_ = Handle<WeakFixedArray>::null();
  }
}

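// Adds a function to the current batch; once the accumulated estimated
// instruction size crosses the batch threshold, compiles the whole batch,
// either concurrently or synchronously on the main thread.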
void BaselineBatchCompiler::EnqueueFunction(Handle<JSFunction> function) {
  Handle<SharedFunctionInfo> shared(function->shared(), isolate_);
  // Early return if the function already has baseline code or is not
  // suitable for baseline compilation.
  if (shared->HasBaselineCode()) return;
  if (!CanCompileWithBaseline(isolate_, *shared)) return;

  // Immediately compile the function if batch compilation is disabled.
  if (!is_enabled()) {
    IsCompiledScope is_compiled_scope(
        function->shared().is_compiled_scope(isolate_));
    Compiler::CompileBaseline(isolate_, function, Compiler::CLEAR_EXCEPTION,
                              &is_compiled_scope);
    return;
  }

  int estimated_size;
  {
    DisallowHeapAllocation no_gc;
    estimated_size = BaselineCompiler::EstimateInstructionSize(
        shared->GetBytecodeArray(isolate_));
  }
  estimated_instruction_size_ += estimated_size;
  if (FLAG_trace_baseline_batch_compilation) {
    CodeTracer::Scope trace_scope(isolate_->GetCodeTracer());
    PrintF(trace_scope.file(),
           "[Baseline batch compilation] Enqueued function ");
    function->PrintName(trace_scope.file());
    PrintF(trace_scope.file(),
           " with estimated size %d (current budget: %d/%d)\n", estimated_size,
           estimated_instruction_size_,
           FLAG_baseline_batch_compilation_threshold);
  }
  if (ShouldCompileBatch()) {
    if (FLAG_trace_baseline_batch_compilation) {
      CodeTracer::Scope trace_scope(isolate_->GetCodeTracer());
      PrintF(trace_scope.file(),
             "[Baseline batch compilation] Compiling current batch of %d "
             "functions\n",
             (last_index_ + 1));
    }
    if (FLAG_concurrent_sparkplug) {
      Enqueue(shared);
      concurrent_compiler_->CompileBatch(compilation_queue_, last_index_);
      ClearBatch();
    } else {
      CompileBatch(function);
    }
  } else {
    Enqueue(shared);
  }
}

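// Only a weak reference to the SharedFunctionInfo is recorded, so the queue
// does not keep otherwise-dead functions alive.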
void BaselineBatchCompiler::Enqueue(Handle<SharedFunctionInfo> shared) {
  EnsureQueueCapacity();
  compilation_queue_->Set(last_index_++, HeapObjectReference::Weak(*shared));
}

void BaselineBatchCompiler::InstallBatch() {
  DCHECK(FLAG_concurrent_sparkplug);
  concurrent_compiler_->InstallBatch();
}

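// Lazily allocates the compilation queue and doubles its length whenever it
// is full (CopyWeakFixedArrayAndGrow grows by last_index_, which equals the
// current length at this point).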
void BaselineBatchCompiler::EnsureQueueCapacity() {
  if (compilation_queue_.is_null()) {
    compilation_queue_ = isolate_->global_handles()->Create(
        *isolate_->factory()->NewWeakFixedArray(kInitialQueueSize,
                                                AllocationType::kOld));
    return;
  }
  if (last_index_ >= compilation_queue_->length()) {
    Handle<WeakFixedArray> new_queue =
        isolate_->factory()->CopyWeakFixedArrayAndGrow(compilation_queue_,
                                                       last_index_);
    GlobalHandles::Destroy(compilation_queue_.location());
    compilation_queue_ = isolate_->global_handles()->Create(*new_queue);
  }
}

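// Synchronous fallback used when --concurrent-sparkplug is off: compiles the
// triggering function plus every enqueued function under a single
// CodePageCollectionMemoryModificationScope.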
void BaselineBatchCompiler::CompileBatch(Handle<JSFunction> function) {
  CodePageCollectionMemoryModificationScope batch_allocation(isolate_->heap());
  {
    IsCompiledScope is_compiled_scope(
        function->shared().is_compiled_scope(isolate_));
    Compiler::CompileBaseline(isolate_, function, Compiler::CLEAR_EXCEPTION,
                              &is_compiled_scope);
  }
  for (int i = 0; i < last_index_; i++) {
    MaybeObject maybe_sfi = compilation_queue_->Get(i);
    MaybeCompileFunction(maybe_sfi);
    compilation_queue_->Set(i, HeapObjectReference::ClearedValue(isolate_));
  }
  ClearBatch();
}

bool BaselineBatchCompiler::ShouldCompileBatch() const {
  return estimated_instruction_size_ >=
         FLAG_baseline_batch_compilation_threshold;
}

bool BaselineBatchCompiler::MaybeCompileFunction(MaybeObject maybe_sfi) {
  HeapObject heapobj;
  // Skip functions where the weak reference is no longer valid.
  if (!maybe_sfi.GetHeapObjectIfWeak(&heapobj)) return false;
  Handle<SharedFunctionInfo> shared =
      handle(SharedFunctionInfo::cast(heapobj), isolate_);
  // Skip functions where the bytecode has been flushed.
  if (!shared->is_compiled()) return false;

  IsCompiledScope is_compiled_scope(shared->is_compiled_scope(isolate_));
  return Compiler::CompileSharedWithBaseline(
      isolate_, shared, Compiler::CLEAR_EXCEPTION, &is_compiled_scope);
}

void BaselineBatchCompiler::ClearBatch() {
  estimated_instruction_size_ = 0;
  last_index_ = 0;
}

}  // namespace baseline
}  // namespace internal
}  // namespace v8

#else

namespace v8 {
namespace internal {
namespace baseline {

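// Stub used on architectures where Sparkplug is not yet ported: batch
// compilation stays permanently disabled (enabled_ is false), so
// InstallBatch() must never be reached.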
class ConcurrentBaselineCompiler {};

BaselineBatchCompiler::BaselineBatchCompiler(Isolate* isolate)
    : isolate_(isolate),
      compilation_queue_(Handle<WeakFixedArray>::null()),
      last_index_(0),
      estimated_instruction_size_(0),
      enabled_(false) {}

BaselineBatchCompiler::~BaselineBatchCompiler() {
  if (!compilation_queue_.is_null()) {
    GlobalHandles::Destroy(compilation_queue_.location());
    compilation_queue_ = Handle<WeakFixedArray>::null();
  }
}

void BaselineBatchCompiler::InstallBatch() { UNREACHABLE(); }

}  // namespace baseline
}  // namespace internal
}  // namespace v8

#endif