1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/interpreter/interpreter.h"
6
7 #include <fstream>
8 #include <memory>
9
10 #include "src/ast/prettyprinter.h"
11 #include "src/bootstrapper.h"
12 #include "src/compiler.h"
13 #include "src/counters-inl.h"
14 #include "src/interpreter/bytecode-generator.h"
15 #include "src/interpreter/bytecodes.h"
16 #include "src/log.h"
17 #include "src/objects-inl.h"
18 #include "src/objects/shared-function-info.h"
19 #include "src/parsing/parse-info.h"
20 #include "src/setup-isolate.h"
21 #include "src/snapshot/snapshot.h"
22 #include "src/unoptimized-compilation-info.h"
23 #include "src/visitors.h"
24
25 namespace v8 {
26 namespace internal {
27 namespace interpreter {
28
// Compilation job that lowers a single FunctionLiteral to bytecode using
// BytecodeGenerator. ExecuteJobImpl() runs the generator (possibly on a
// background thread -- see the on_background_thread() check in its body);
// FinalizeJobImpl() materializes the BytecodeArray on the main thread.
class InterpreterCompilationJob final : public UnoptimizedCompilationJob {
 public:
  InterpreterCompilationJob(ParseInfo* parse_info, FunctionLiteral* literal,
                            AccountingAllocator* allocator,
                            ZoneVector<FunctionLiteral*>* eager_inner_literals);

 protected:
  Status ExecuteJobImpl() final;
  Status FinalizeJobImpl(Handle<SharedFunctionInfo> shared_info,
                         Isolate* isolate) final;

 private:
  BytecodeGenerator* generator() { return &generator_; }

  // Declaration order matters: zone_ must outlive compilation_info_, which
  // must outlive generator_, since each is constructed from the previous one.
  Zone zone_;
  UnoptimizedCompilationInfo compilation_info_;
  BytecodeGenerator generator_;

  DISALLOW_COPY_AND_ASSIGN(InterpreterCompilationJob);
};
49
Interpreter(Isolate * isolate)50 Interpreter::Interpreter(Isolate* isolate) : isolate_(isolate) {
51 memset(dispatch_table_, 0, sizeof(dispatch_table_));
52
53 if (FLAG_trace_ignition_dispatches) {
54 static const int kBytecodeCount = static_cast<int>(Bytecode::kLast) + 1;
55 bytecode_dispatch_counters_table_.reset(
56 new uintptr_t[kBytecodeCount * kBytecodeCount]);
57 memset(bytecode_dispatch_counters_table_.get(), 0,
58 sizeof(uintptr_t) * kBytecodeCount * kBytecodeCount);
59 }
60 }
61
// Returns the handler Code for |bytecode| at |operand_scale|. If the table
// still holds the lazy-deserialization placeholder, deserializes the real
// handler from the snapshot and installs it before returning.
Code* Interpreter::GetAndMaybeDeserializeBytecodeHandler(
    Bytecode bytecode, OperandScale operand_scale) {
  Code* code = GetBytecodeHandler(bytecode, operand_scale);

  // Already deserialized? Then just return the handler.
  if (!isolate_->heap()->IsDeserializeLazyHandler(code)) return code;

  DCHECK(FLAG_lazy_handler_deserialization);
  DCHECK(Bytecodes::BytecodeHasHandler(bytecode, operand_scale));
  code = Snapshot::DeserializeHandler(isolate_, bytecode, operand_scale);

  // Sanity-check the deserialized result before publishing it.
  DCHECK(code->IsCode());
  DCHECK_EQ(code->kind(), Code::BYTECODE_HANDLER);
  DCHECK(!isolate_->heap()->IsDeserializeLazyHandler(code));

  // Cache the real handler in the dispatch table so subsequent dispatches
  // skip deserialization.
  SetBytecodeHandler(bytecode, operand_scale, code);

  return code;
}
81
GetBytecodeHandler(Bytecode bytecode,OperandScale operand_scale)82 Code* Interpreter::GetBytecodeHandler(Bytecode bytecode,
83 OperandScale operand_scale) {
84 DCHECK(IsDispatchTableInitialized());
85 DCHECK(Bytecodes::BytecodeHasHandler(bytecode, operand_scale));
86 size_t index = GetDispatchTableIndex(bytecode, operand_scale);
87 Address code_entry = dispatch_table_[index];
88 return Code::GetCodeFromTargetAddress(code_entry);
89 }
90
SetBytecodeHandler(Bytecode bytecode,OperandScale operand_scale,Code * handler)91 void Interpreter::SetBytecodeHandler(Bytecode bytecode,
92 OperandScale operand_scale,
93 Code* handler) {
94 DCHECK(handler->kind() == Code::BYTECODE_HANDLER);
95 size_t index = GetDispatchTableIndex(bytecode, operand_scale);
96 dispatch_table_[index] = handler->entry();
97 }
98
99 // static
GetDispatchTableIndex(Bytecode bytecode,OperandScale operand_scale)100 size_t Interpreter::GetDispatchTableIndex(Bytecode bytecode,
101 OperandScale operand_scale) {
102 static const size_t kEntriesPerOperandScale = 1u << kBitsPerByte;
103 size_t index = static_cast<size_t>(bytecode);
104 switch (operand_scale) {
105 case OperandScale::kSingle:
106 return index;
107 case OperandScale::kDouble:
108 return index + kEntriesPerOperandScale;
109 case OperandScale::kQuadruple:
110 return index + 2 * kEntriesPerOperandScale;
111 }
112 UNREACHABLE();
113 }
114
// Exposes every installed handler in the dispatch table to |v| as a GC root,
// and rewrites a slot's stored entry address if the visitor replaced the
// Code object (e.g. because the GC moved it).
void Interpreter::IterateDispatchTable(RootVisitor* v) {
  for (int i = 0; i < kDispatchTableSize; i++) {
    Address code_entry = dispatch_table_[i];
    // Empty slots are presented to the visitor as nullptr; occupied slots
    // are mapped from raw entry address back to their Code object.
    Object* code = code_entry == kNullAddress
                       ? nullptr
                       : Code::GetCodeFromTargetAddress(code_entry);
    Object* old_code = code;
    v->VisitRootPointer(Root::kDispatchTable, nullptr, &code);
    // If the visitor updated the pointer, store the new Code's entry
    // address back into the table.
    if (code != old_code) {
      dispatch_table_[i] = reinterpret_cast<Code*>(code)->entry();
    }
  }
}
128
InterruptBudget()129 int Interpreter::InterruptBudget() {
130 return FLAG_interrupt_budget;
131 }
132
133 namespace {
134
MaybePrintAst(ParseInfo * parse_info,UnoptimizedCompilationInfo * compilation_info)135 void MaybePrintAst(ParseInfo* parse_info,
136 UnoptimizedCompilationInfo* compilation_info) {
137 if (!FLAG_print_ast) return;
138
139 StdoutStream os;
140 std::unique_ptr<char[]> name = compilation_info->literal()->GetDebugName();
141 os << "[generating bytecode for function: " << name.get() << "]" << std::endl;
142 #ifdef DEBUG
143 os << "--- AST ---" << std::endl
144 << AstPrinter(parse_info->stack_limit())
145 .PrintProgram(compilation_info->literal())
146 << std::endl;
147 #endif // DEBUG
148 }
149
ShouldPrintBytecode(Handle<SharedFunctionInfo> shared)150 bool ShouldPrintBytecode(Handle<SharedFunctionInfo> shared) {
151 if (!FLAG_print_bytecode) return false;
152
153 // Checks whether function passed the filter.
154 if (shared->is_toplevel()) {
155 Vector<const char> filter = CStrVector(FLAG_print_bytecode_filter);
156 return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*');
157 } else {
158 return shared->PassesFilter(FLAG_print_bytecode_filter);
159 }
160 }
161
162 } // namespace
163
// Wires up the zone, compilation info and bytecode generator for one
// function. NOTE(review): &compilation_info_ is handed to the base-class
// constructor before compilation_info_ itself is initialized; this is safe
// only if UnoptimizedCompilationJob merely stores the pointer -- confirm in
// its definition. Member init order (zone_, compilation_info_, generator_)
// follows declaration order, so each member may depend on the previous one.
InterpreterCompilationJob::InterpreterCompilationJob(
    ParseInfo* parse_info, FunctionLiteral* literal,
    AccountingAllocator* allocator,
    ZoneVector<FunctionLiteral*>* eager_inner_literals)
    : UnoptimizedCompilationJob(parse_info->stack_limit(), parse_info,
                                &compilation_info_),
      zone_(allocator, ZONE_NAME),
      compilation_info_(&zone_, parse_info, literal),
      generator_(&compilation_info_, parse_info->ast_string_constants(),
                 eager_inner_literals) {}
174
ExecuteJobImpl()175 InterpreterCompilationJob::Status InterpreterCompilationJob::ExecuteJobImpl() {
176 RuntimeCallTimerScope runtimeTimerScope(
177 parse_info()->runtime_call_stats(),
178 parse_info()->on_background_thread()
179 ? RuntimeCallCounterId::kCompileBackgroundIgnition
180 : RuntimeCallCounterId::kCompileIgnition);
181 // TODO(lpy): add support for background compilation RCS trace.
182 TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileIgnition");
183
184 // Print AST if flag is enabled. Note, if compiling on a background thread
185 // then ASTs from different functions may be intersperse when printed.
186 MaybePrintAst(parse_info(), compilation_info());
187
188 generator()->GenerateBytecode(stack_limit());
189
190 if (generator()->HasStackOverflow()) {
191 return FAILED;
192 }
193 return SUCCEEDED;
194 }
195
// Materializes the generated bytecode as a heap-allocated BytecodeArray,
// optionally disassembles it, and stores it on the compilation info.
// Must run where |isolate| is accessible (main thread).
InterpreterCompilationJob::Status InterpreterCompilationJob::FinalizeJobImpl(
    Handle<SharedFunctionInfo> shared_info, Isolate* isolate) {
  RuntimeCallTimerScope runtimeTimerScope(
      parse_info()->runtime_call_stats(),
      RuntimeCallCounterId::kCompileIgnitionFinalization);
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
               "V8.CompileIgnitionFinalization");

  // Finalization itself can overflow the stack, so check after, not before.
  Handle<BytecodeArray> bytecodes =
      generator()->FinalizeBytecode(isolate, parse_info()->script());
  if (generator()->HasStackOverflow()) {
    return FAILED;
  }

  // Dump the finished bytecode if --print-bytecode selects this function.
  if (ShouldPrintBytecode(shared_info)) {
    StdoutStream os;
    std::unique_ptr<char[]> name =
        compilation_info()->literal()->GetDebugName();
    os << "[generated bytecode for function: " << name.get() << "]"
       << std::endl;
    bytecodes->Disassemble(os);
    os << std::flush;
  }

  compilation_info()->SetBytecodeArray(bytecodes);
  return SUCCEEDED;
}
223
NewCompilationJob(ParseInfo * parse_info,FunctionLiteral * literal,AccountingAllocator * allocator,ZoneVector<FunctionLiteral * > * eager_inner_literals)224 UnoptimizedCompilationJob* Interpreter::NewCompilationJob(
225 ParseInfo* parse_info, FunctionLiteral* literal,
226 AccountingAllocator* allocator,
227 ZoneVector<FunctionLiteral*>* eager_inner_literals) {
228 return new InterpreterCompilationJob(parse_info, literal, allocator,
229 eager_inner_literals);
230 }
231
IsDispatchTableInitialized() const232 bool Interpreter::IsDispatchTableInitialized() const {
233 return dispatch_table_[0] != kNullAddress;
234 }
235
// Best-effort reverse lookup: returns the name of the bytecode whose
// single-scale dispatch entry matches |code|'s entry address, or nullptr if
// no entry matches. Note this scans only the kSingle segment of the table,
// and always returns nullptr when the disassembler is compiled out.
const char* Interpreter::LookupNameOfBytecodeHandler(const Code* code) {
#ifdef ENABLE_DISASSEMBLER
#define RETURN_NAME(Name, ...) \
  if (dispatch_table_[Bytecodes::ToByte(Bytecode::k##Name)] == \
      code->entry()) { \
    return #Name; \
  }
  BYTECODE_LIST(RETURN_NAME)
#undef RETURN_NAME
#endif  // ENABLE_DISASSEMBLER
  return nullptr;
}
248
GetDispatchCounter(Bytecode from,Bytecode to) const249 uintptr_t Interpreter::GetDispatchCounter(Bytecode from, Bytecode to) const {
250 int from_index = Bytecodes::ToByte(from);
251 int to_index = Bytecodes::ToByte(to);
252 return bytecode_dispatch_counters_table_[from_index * kNumberOfBytecodes +
253 to_index];
254 }
255
// Builds a JS object view of the dispatch counters in the current context.
// Requires --trace-ignition-dispatches (otherwise the counters table is not
// allocated -- see the constructor); callers must have a current context.
Local<v8::Object> Interpreter::GetDispatchCountersObject() {
  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(isolate_);
  Local<v8::Context> context = isolate->GetCurrentContext();

  Local<v8::Object> counters_map = v8::Object::New(isolate);

  // Output is a JSON-encoded object of objects.
  //
  // The keys on the top level object are source bytecodes,
  // and corresponding value are objects. Keys on these last are the
  // destinations of the dispatch and the value associated is a counter for
  // the correspondent source-destination dispatch chain.
  //
  // Only non-zero counters are written to file, but an entry in the top-level
  // object is always present, even if the value is empty because all counters
  // for that source are zero.

  for (int from_index = 0; from_index < kNumberOfBytecodes; ++from_index) {
    Bytecode from_bytecode = Bytecodes::FromByte(from_index);
    Local<v8::Object> counters_row = v8::Object::New(isolate);

    for (int to_index = 0; to_index < kNumberOfBytecodes; ++to_index) {
      Bytecode to_bytecode = Bytecodes::FromByte(to_index);
      uintptr_t counter = GetDispatchCounter(from_bytecode, to_bytecode);

      // Skip zero counters; only observed transitions get a property.
      if (counter > 0) {
        std::string to_name = Bytecodes::ToString(to_bytecode);
        Local<v8::String> to_name_object =
            v8::String::NewFromUtf8(isolate, to_name.c_str(),
                                    NewStringType::kNormal)
                .ToLocalChecked();
        // NOTE(review): v8::Number holds a double, so counters above 2^53
        // would lose precision here.
        Local<v8::Number> counter_object = v8::Number::New(isolate, counter);
        CHECK(counters_row
                  ->DefineOwnProperty(context, to_name_object, counter_object)
                  .IsJust());
      }
    }

    // Every source bytecode gets a row, even when all its counters are zero.
    std::string from_name = Bytecodes::ToString(from_bytecode);
    Local<v8::String> from_name_object =
        v8::String::NewFromUtf8(isolate, from_name.c_str(),
                                NewStringType::kNormal)
            .ToLocalChecked();

    CHECK(
        counters_map->DefineOwnProperty(context, from_name_object, counters_row)
            .IsJust());
  }

  return counters_map;
}
307
308 } // namespace interpreter
309 } // namespace internal
310 } // namespace v8
311