// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <iomanip>

#include "src/execution/isolate-utils.h"
#include "src/objects/code.h"

#include "src/codegen/assembler-inl.h"
#include "src/codegen/cpu-features.h"
#include "src/codegen/reloc-info.h"
#include "src/codegen/safepoint-table.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/interpreter/bytecode-array-iterator.h"
#include "src/interpreter/bytecode-decoder.h"
#include "src/interpreter/interpreter.h"
#include "src/objects/allocation-site-inl.h"
#include "src/roots/roots-inl.h"
#include "src/snapshot/embedded/embedded-data.h"
#include "src/utils/ostreams.h"

#ifdef ENABLE_DISASSEMBLER
#include "src/codegen/code-comments.h"
#include "src/diagnostics/disasm.h"
#include "src/diagnostics/disassembler.h"
#include "src/diagnostics/eh-frame.h"
#endif

namespace v8 {
namespace internal {

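// The metadata section of a Code object consists of contiguous,
// variable-sized areas: safepoint table, handler table, constant pool, code
// comments, and unwinding info, in that order. Each area's size is thus the
// distance between its own offset and the offset of the following area.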
Address Code::SafepointTableAddress() const {
  return MetadataStart() + safepoint_table_offset();
}

int Code::safepoint_table_size() const {
  DCHECK_GE(handler_table_offset() - safepoint_table_offset(), 0);
  return handler_table_offset() - safepoint_table_offset();
}

bool Code::has_safepoint_table() const { return safepoint_table_size() > 0; }

Address Code::HandlerTableAddress() const {
  return MetadataStart() + handler_table_offset();
}

int Code::handler_table_size() const {
  DCHECK_GE(constant_pool_offset() - handler_table_offset(), 0);
  return constant_pool_offset() - handler_table_offset();
}

bool Code::has_handler_table() const { return handler_table_size() > 0; }

int Code::constant_pool_size() const {
  const int size = code_comments_offset() - constant_pool_offset();
  DCHECK_IMPLIES(!FLAG_enable_embedded_constant_pool, size == 0);
  DCHECK_GE(size, 0);
  return size;
}

bool Code::has_constant_pool() const { return constant_pool_size() > 0; }

int Code::code_comments_size() const {
  DCHECK_GE(unwinding_info_offset() - code_comments_offset(), 0);
  return unwinding_info_offset() - code_comments_offset();
}

bool Code::has_code_comments() const { return code_comments_size() > 0; }

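// Replaces every embedded heap object in this Code object with the undefined
// value. Skipping the write barrier is safe here since undefined is an
// immortal read-only root and never needs to be remembered.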
void Code::ClearEmbeddedObjects(Heap* heap) {
  HeapObject undefined = ReadOnlyRoots(heap).undefined_value();
  int mode_mask = RelocInfo::EmbeddedObjectModeMask();
  for (RelocIterator it(*this, mode_mask); !it.done(); it.next()) {
    DCHECK(RelocInfo::IsEmbeddedObjectMode(it.rinfo()->rmode()));
    it.rinfo()->set_target_object(heap, undefined, SKIP_WRITE_BARRIER);
  }
  set_embedded_objects_cleared(true);
}

void Code::Relocate(intptr_t delta) {
  for (RelocIterator it(*this, RelocInfo::kApplyMask); !it.done(); it.next()) {
    it.rinfo()->apply(delta);
  }
  FlushICache();
}

void Code::FlushICache() const {
  FlushInstructionCache(raw_instruction_start(), raw_instruction_size());
}

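// Copies the generated code and relocation info from the given CodeDesc into
// this Code object, then resolves embedded object handles and applies
// relative-address fixups. As the name implies, the instruction cache is not
// flushed; callers must do that separately (see Code::FlushICache) once all
// writes have completed.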
void Code::CopyFromNoFlush(Heap* heap, const CodeDesc& desc) {
  // Copy code.
  STATIC_ASSERT(kOnHeapBodyIsContiguous);
  CopyBytes(reinterpret_cast<byte*>(raw_instruction_start()), desc.buffer,
            static_cast<size_t>(desc.instr_size));
  // TODO(jgruber,v8:11036): Merge with the above.
  CopyBytes(reinterpret_cast<byte*>(raw_instruction_start() + desc.instr_size),
            desc.unwinding_info, static_cast<size_t>(desc.unwinding_info_size));

  // Copy reloc info.
  CopyRelocInfoToByteArray(unchecked_relocation_info(), desc);

  // Unbox handles and relocate.
  Assembler* origin = desc.origin;
  const int mode_mask = RelocInfo::PostCodegenRelocationMask();
  for (RelocIterator it(*this, mode_mask); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsEmbeddedObjectMode(mode)) {
      Handle<HeapObject> p = it.rinfo()->target_object_handle(origin);
      it.rinfo()->set_target_object(heap, *p, UPDATE_WRITE_BARRIER,
                                    SKIP_ICACHE_FLUSH);
    } else if (RelocInfo::IsCodeTargetMode(mode)) {
      // Rewrite code handles to direct pointers to the first instruction in
      // the code object.
      Handle<Object> p = it.rinfo()->target_object_handle(origin);
      Code code = Code::cast(*p);
      it.rinfo()->set_target_address(code.raw_instruction_start(),
                                     UPDATE_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
    } else if (RelocInfo::IsRuntimeEntry(mode)) {
      Address p = it.rinfo()->target_runtime_entry(origin);
      it.rinfo()->set_target_runtime_entry(p, UPDATE_WRITE_BARRIER,
                                           SKIP_ICACHE_FLUSH);
    } else {
      intptr_t delta =
          raw_instruction_start() - reinterpret_cast<Address>(desc.buffer);
      it.rinfo()->apply(delta);
    }
  }
}

SafepointEntry Code::GetSafepointEntry(Address pc) {
  SafepointTable table(*this);
  return table.FindEntry(pc);
}

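// The following accessors resolve the instruction and metadata areas of the
// builtin that an off-heap trampoline targets, as laid out in the embedded
// blob. If no embedded blob is available (e.g. while it is still being
// created), they fall back to the trampoline's own on-heap values.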
int Code::OffHeapInstructionSize() const {
  DCHECK(is_off_heap_trampoline());
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) {
    return raw_instruction_size();
  }
  EmbeddedData d = EmbeddedData::FromBlob();
  return d.InstructionSizeOfBuiltin(builtin_index());
}

Address Code::OffHeapInstructionStart() const {
  DCHECK(is_off_heap_trampoline());
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) {
    return raw_instruction_start();
  }
  EmbeddedData d = EmbeddedData::FromBlob();
  return d.InstructionStartOfBuiltin(builtin_index());
}

Address Code::OffHeapInstructionEnd() const {
  DCHECK(is_off_heap_trampoline());
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) {
    return raw_instruction_start() + raw_instruction_size();
  }
  EmbeddedData d = EmbeddedData::FromBlob();
  return d.InstructionStartOfBuiltin(builtin_index()) +
         d.InstructionSizeOfBuiltin(builtin_index());
}

int Code::OffHeapMetadataSize() const {
  DCHECK(is_off_heap_trampoline());
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) {
    // The unwinding info is the last area of the metadata section.
    return static_cast<int>(unwinding_info_end() - MetadataStart());
  }
  EmbeddedData d = EmbeddedData::FromBlob();
  return d.MetadataSizeOfBuiltin(builtin_index());
}

Address Code::OffHeapMetadataStart() const {
  DCHECK(is_off_heap_trampoline());
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) {
    return MetadataStart();
  }
  EmbeddedData d = EmbeddedData::FromBlob();
  return d.MetadataStartOfBuiltin(builtin_index());
}

Address Code::OffHeapMetadataEnd() const {
  DCHECK(is_off_heap_trampoline());
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) {
    // The unwinding info is the last area of the metadata section.
    return unwinding_info_end();
  }
  EmbeddedData d = EmbeddedData::FromBlob();
  return d.MetadataStartOfBuiltin(builtin_index()) +
         d.MetadataSizeOfBuiltin(builtin_index());
}

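// Maps a code offset (a bytecode offset, or a pc offset for Code objects)
// to the closest preceding source position recorded in the source position
// table.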
int AbstractCode::SourcePosition(int offset) {
  Object maybe_table = source_position_table();
  if (maybe_table.IsException()) return kNoSourcePosition;

  ByteArray source_position_table = ByteArray::cast(maybe_table);
  // Subtract one because the current PC is one instruction after the call
  // site.
  if (IsCode()) offset--;
  int position = 0;
  for (SourcePositionTableIterator iterator(
           source_position_table, SourcePositionTableIterator::kJavaScriptOnly,
           SourcePositionTableIterator::kDontSkipFunctionEntry);
       !iterator.done() && iterator.code_offset() <= offset;
       iterator.Advance()) {
    position = iterator.source_position().ScriptOffset();
  }
  return position;
}

int AbstractCode::SourceStatementPosition(int offset) {
  // First find the closest position.
  int position = SourcePosition(offset);
  // Now find the closest statement position before the position.
  int statement_position = 0;
  for (SourcePositionTableIterator it(source_position_table()); !it.done();
       it.Advance()) {
    if (it.is_statement()) {
      int p = it.source_position().ScriptOffset();
      if (statement_position < p && p <= position) {
        statement_position = p;
      }
    }
  }
  return statement_position;
}

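// Returns true if this optimized code can deoptimize at the given pc, i.e.
// the pc corresponds to a recorded deoptimization point with a valid
// bytecode offset.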
bool Code::CanDeoptAt(Address pc) {
  DeoptimizationData deopt_data =
      DeoptimizationData::cast(deoptimization_data());
  Address code_start_address = InstructionStart();
  for (int i = 0; i < deopt_data.DeoptCount(); i++) {
    if (deopt_data.Pc(i).value() == -1) continue;
    Address address = code_start_address + deopt_data.Pc(i).value();
    if (address == pc && deopt_data.BytecodeOffset(i) != BailoutId::None()) {
      return true;
    }
  }
  return false;
}

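// A Code object is isolate-independent if it can be shared across isolates,
// i.e. if it contains no relocation entries whose targets are tied to a
// particular isolate's heap. Code targets are permitted when they refer to
// isolate-independent builtins, as detailed below.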
bool Code::IsIsolateIndependent(Isolate* isolate) {
  static constexpr int kModeMask =
      RelocInfo::AllRealModesMask() &
      ~RelocInfo::ModeMask(RelocInfo::CONST_POOL) &
      ~RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) &
      ~RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
  STATIC_ASSERT(kModeMask ==
                (RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
                 RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET) |
                 RelocInfo::ModeMask(RelocInfo::COMPRESSED_EMBEDDED_OBJECT) |
                 RelocInfo::ModeMask(RelocInfo::FULL_EMBEDDED_OBJECT) |
                 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                 RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                 RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
                 RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
                 RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL)));

#if defined(V8_TARGET_ARCH_PPC) || defined(V8_TARGET_ARCH_PPC64) || \
    defined(V8_TARGET_ARCH_MIPS64)
  return RelocIterator(*this, kModeMask).done();
#elif defined(V8_TARGET_ARCH_X64) || defined(V8_TARGET_ARCH_ARM64) || \
    defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS) ||    \
    defined(V8_TARGET_ARCH_S390) || defined(V8_TARGET_ARCH_IA32)
  for (RelocIterator it(*this, kModeMask); !it.done(); it.next()) {
    // On these platforms we emit relative builtin-to-builtin jumps for
    // isolate-independent builtins in the snapshot. They are later rewritten
    // as pc-relative jumps to the off-heap instruction stream and are thus
    // process-independent. See also: FinalizeEmbeddedCodeTargets.
    if (RelocInfo::IsCodeTargetMode(it.rinfo()->rmode())) {
      Address target_address = it.rinfo()->target_address();
      if (InstructionStream::PcIsOffHeap(isolate, target_address)) continue;

      Code target = Code::GetCodeFromTargetAddress(target_address);
      CHECK(target.IsCode());
      if (Builtins::IsIsolateIndependentBuiltin(target)) continue;
    }
    return false;
  }
#else
#error Unsupported architecture.
#endif
  return true;
}

// Multiple native contexts live on the same heap, and V8 currently draws no
// clear distinction between native-context-dependent and independent
// objects. A good guideline is "objects embedded into bytecode are
// nc-independent", since bytecode is shared between native contexts. Among
// others, this is the case for ScopeInfo, SharedFunctionInfo, String, etc.
bool Code::IsNativeContextIndependent(Isolate* isolate) {
  static constexpr int kModeMask =
      RelocInfo::AllRealModesMask() &
      ~RelocInfo::ModeMask(RelocInfo::CONST_POOL) &
      ~RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) &
      ~RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) &
      ~RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) &
      ~RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) &
      ~RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) &
      ~RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
  STATIC_ASSERT(kModeMask ==
                (RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
                 RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET) |
                 RelocInfo::ModeMask(RelocInfo::COMPRESSED_EMBEDDED_OBJECT) |
                 RelocInfo::ModeMask(RelocInfo::FULL_EMBEDDED_OBJECT) |
                 RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
                 RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL)));

  bool is_independent = true;
  for (RelocIterator it(*this, kModeMask); !it.done(); it.next()) {
    if (RelocInfo::IsEmbeddedObjectMode(it.rinfo()->rmode())) {
      HeapObject o = it.rinfo()->target_object();
      // TODO(jgruber,v8:8888): Extend this with further NCI objects, and
      // define a more systematic IsNativeContextIndependent<T>() predicate.
      if (o.IsString()) continue;
      if (o.IsScopeInfo()) continue;
      if (o.IsHeapNumber()) continue;
      if (o.IsBigInt()) continue;
      if (o.IsSharedFunctionInfo()) continue;
      if (o.IsArrayBoilerplateDescription()) continue;
      if (o.IsObjectBoilerplateDescription()) continue;
      if (o.IsTemplateObjectDescription()) continue;
      if (o.IsFixedArray()) {
        // Some uses of FixedArray are valid.
        // 1. Passed as arg to %DeclareGlobals, contains only strings
        //    and SFIs.
        // 2. Passed as arg to %DefineClass. No well-defined contents.
        // .. ?
        // TODO(jgruber): Consider assigning dedicated instance types
        // instead of assuming fixed arrays are okay.
        continue;
      }
      // Other objects are expected to be context-dependent.
      PrintF("Found native-context-dependent object:\n");
      o.Print();
      o.map().Print();
    }
    is_independent = false;
  }

  return is_independent;
}

bool Code::Inlines(SharedFunctionInfo sfi) {
  // We can only check for inlining for optimized code.
  DCHECK(is_optimized_code());
  DisallowHeapAllocation no_gc;
  DeoptimizationData const data =
      DeoptimizationData::cast(deoptimization_data());
  if (data.length() == 0) return false;
  if (data.SharedFunctionInfo() == sfi) return true;
  FixedArray const literals = data.LiteralArray();
  int const inlined_count = data.InlinedFunctionCount().value();
  for (int i = 0; i < inlined_count; ++i) {
    if (SharedFunctionInfo::cast(literals.get(i)) == sfi) return true;
  }
  return false;
}

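// Iterates over all optimized Code objects in the heap by walking each
// native context's optimized code list (linked through next_code_link) and
// chaining from one native context to the next via next_context_link.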
Code::OptimizedCodeIterator::OptimizedCodeIterator(Isolate* isolate) {
  isolate_ = isolate;
  Object list = isolate->heap()->native_contexts_list();
  next_context_ =
      list.IsUndefined(isolate_) ? NativeContext() : NativeContext::cast(list);
}

Code Code::OptimizedCodeIterator::Next() {
  do {
    Object next;
    if (!current_code_.is_null()) {
      // Get next code in the linked list.
      next = current_code_.next_code_link();
    } else if (!next_context_.is_null()) {
      // Linked list of code exhausted. Get the code list of the next context.
      next = next_context_.OptimizedCodeListHead();
      Object next_context = next_context_.next_context_link();
      next_context_ = next_context.IsUndefined(isolate_)
                          ? NativeContext()
                          : NativeContext::cast(next_context);
    } else {
      // Exhausted contexts.
      return Code();
    }
    current_code_ = next.IsUndefined(isolate_) ? Code() : Code::cast(next);
  } while (current_code_.is_null());
  DCHECK(CodeKindCanDeoptimize(current_code_.kind()));
  return current_code_;
}

Handle<DeoptimizationData> DeoptimizationData::New(Isolate* isolate,
                                                   int deopt_entry_count,
                                                   AllocationType allocation) {
  return Handle<DeoptimizationData>::cast(isolate->factory()->NewFixedArray(
      LengthFor(deopt_entry_count), allocation));
}

Handle<DeoptimizationData> DeoptimizationData::Empty(Isolate* isolate) {
  return Handle<DeoptimizationData>::cast(
      isolate->factory()->empty_fixed_array());
}

SharedFunctionInfo DeoptimizationData::GetInlinedFunction(int index) {
  if (index == -1) {
    return SharedFunctionInfo();
  } else {
    return SharedFunctionInfo::cast(LiteralArray().get(index));
  }
}

#ifdef ENABLE_DISASSEMBLER

const char* Code::GetName(Isolate* isolate) const {
  if (kind() == CodeKind::BYTECODE_HANDLER) {
    return isolate->interpreter()->LookupNameOfBytecodeHandler(*this);
  } else {
    // There are some handlers and ICs that we can also find names for with
    // Builtins::Lookup.
    return isolate->builtins()->Lookup(raw_instruction_start());
  }
}

namespace {
void print_pc(std::ostream& os, int pc) {
  if (pc == -1) {
    os << "NA";
  } else {
    os << std::hex << pc << std::dec;
  }
}
}  // anonymous namespace

void DeoptimizationData::DeoptimizationDataPrint(std::ostream& os) {  // NOLINT
  if (length() == 0) {
    os << "Deoptimization Input Data invalidated by lazy deoptimization\n";
    return;
  }

  disasm::NameConverter converter;
  int const inlined_function_count = InlinedFunctionCount().value();
  os << "Inlined functions (count = " << inlined_function_count << ")\n";
  for (int id = 0; id < inlined_function_count; ++id) {
    Object info = LiteralArray().get(id);
    os << " " << Brief(SharedFunctionInfo::cast(info)) << "\n";
  }
  os << "\n";
  int deopt_count = DeoptCount();
  os << "Deoptimization Input Data (deopt points = " << deopt_count << ")\n";
  if (0 != deopt_count) {
    os << " index bytecode-offset pc";
    if (FLAG_print_code_verbose) os << " commands";
    os << "\n";
  }
  for (int i = 0; i < deopt_count; i++) {
    os << std::setw(6) << i << " " << std::setw(15)
       << BytecodeOffset(i).ToInt() << " " << std::setw(4);
    print_pc(os, Pc(i).value());
    os << std::setw(2);

    if (!FLAG_print_code_verbose) {
      os << "\n";
      continue;
    }

    // Print details of the frame translation.
    int translation_index = TranslationIndex(i).value();
    TranslationIterator iterator(TranslationByteArray(), translation_index);
    Translation::Opcode opcode =
        static_cast<Translation::Opcode>(iterator.Next());
    DCHECK(Translation::BEGIN == opcode);
    int frame_count = iterator.Next();
    int jsframe_count = iterator.Next();
    int update_feedback_count = iterator.Next();
    os << " " << Translation::StringFor(opcode)
       << " {frame count=" << frame_count
       << ", js frame count=" << jsframe_count
       << ", update_feedback_count=" << update_feedback_count << "}\n";

    while (iterator.HasNext() &&
           Translation::BEGIN !=
               (opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
      os << std::setw(31) << " " << Translation::StringFor(opcode) << " ";

      switch (opcode) {
        case Translation::BEGIN:
          UNREACHABLE();
          break;

        case Translation::INTERPRETED_FRAME: {
          int bytecode_offset = iterator.Next();
          int shared_info_id = iterator.Next();
          unsigned height = iterator.Next();
          int return_value_offset = iterator.Next();
          int return_value_count = iterator.Next();
          Object shared_info = LiteralArray().get(shared_info_id);
          os << "{bytecode_offset=" << bytecode_offset << ", function="
             << Brief(SharedFunctionInfo::cast(shared_info).DebugName())
             << ", height=" << height << ", retval=@" << return_value_offset
             << "(#" << return_value_count << ")}";
          break;
        }

        case Translation::CONSTRUCT_STUB_FRAME: {
          int bailout_id = iterator.Next();
          int shared_info_id = iterator.Next();
          Object shared_info = LiteralArray().get(shared_info_id);
          unsigned height = iterator.Next();
          os << "{bailout_id=" << bailout_id << ", function="
             << Brief(SharedFunctionInfo::cast(shared_info).DebugName())
             << ", height=" << height << "}";
          break;
        }

        case Translation::BUILTIN_CONTINUATION_FRAME:
        case Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME:
        case Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME: {
          int bailout_id = iterator.Next();
          int shared_info_id = iterator.Next();
          Object shared_info = LiteralArray().get(shared_info_id);
          unsigned height = iterator.Next();
          os << "{bailout_id=" << bailout_id << ", function="
             << Brief(SharedFunctionInfo::cast(shared_info).DebugName())
             << ", height=" << height << "}";
          break;
        }

        case Translation::ARGUMENTS_ADAPTOR_FRAME: {
          int shared_info_id = iterator.Next();
          Object shared_info = LiteralArray().get(shared_info_id);
          unsigned height = iterator.Next();
          os << "{function="
             << Brief(SharedFunctionInfo::cast(shared_info).DebugName())
             << ", height=" << height << "}";
          break;
        }

        case Translation::REGISTER: {
          int reg_code = iterator.Next();
          os << "{input=" << converter.NameOfCPURegister(reg_code) << "}";
          break;
        }

        case Translation::INT32_REGISTER: {
          int reg_code = iterator.Next();
          os << "{input=" << converter.NameOfCPURegister(reg_code)
             << " (int32)}";
          break;
        }

        case Translation::INT64_REGISTER: {
          int reg_code = iterator.Next();
          os << "{input=" << converter.NameOfCPURegister(reg_code)
             << " (int64)}";
          break;
        }

        case Translation::UINT32_REGISTER: {
          int reg_code = iterator.Next();
          os << "{input=" << converter.NameOfCPURegister(reg_code)
             << " (uint32)}";
          break;
        }

        case Translation::BOOL_REGISTER: {
          int reg_code = iterator.Next();
          os << "{input=" << converter.NameOfCPURegister(reg_code)
             << " (bool)}";
          break;
        }

        case Translation::FLOAT_REGISTER: {
          int reg_code = iterator.Next();
          os << "{input=" << FloatRegister::from_code(reg_code) << "}";
          break;
        }

        case Translation::DOUBLE_REGISTER: {
          int reg_code = iterator.Next();
          os << "{input=" << DoubleRegister::from_code(reg_code) << "}";
          break;
        }

        case Translation::STACK_SLOT: {
          int input_slot_index = iterator.Next();
          os << "{input=" << input_slot_index << "}";
          break;
        }

        case Translation::INT32_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          os << "{input=" << input_slot_index << " (int32)}";
          break;
        }

        case Translation::INT64_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          os << "{input=" << input_slot_index << " (int64)}";
          break;
        }

        case Translation::UINT32_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          os << "{input=" << input_slot_index << " (uint32)}";
          break;
        }

        case Translation::BOOL_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          os << "{input=" << input_slot_index << " (bool)}";
          break;
        }

        case Translation::FLOAT_STACK_SLOT:
        case Translation::DOUBLE_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          os << "{input=" << input_slot_index << "}";
          break;
        }

        case Translation::LITERAL: {
          int literal_index = iterator.Next();
          Object literal_value = LiteralArray().get(literal_index);
          os << "{literal_id=" << literal_index << " (" << Brief(literal_value)
             << ")}";
          break;
        }

        case Translation::DUPLICATED_OBJECT: {
          int object_index = iterator.Next();
          os << "{object_index=" << object_index << "}";
          break;
        }

        case Translation::ARGUMENTS_ELEMENTS: {
          CreateArgumentsType arguments_type =
              static_cast<CreateArgumentsType>(iterator.Next());
          os << "{arguments_type=" << arguments_type << "}";
          break;
        }
        case Translation::ARGUMENTS_LENGTH: {
          os << "{arguments_length}";
          break;
        }

        case Translation::CAPTURED_OBJECT: {
          int args_length = iterator.Next();
          os << "{length=" << args_length << "}";
          break;
        }

        case Translation::UPDATE_FEEDBACK: {
          int literal_index = iterator.Next();
          FeedbackSlot slot(iterator.Next());
          os << "{feedback={vector_index=" << literal_index << ", slot=" << slot
             << "}}";
          break;
        }
      }
      os << "\n";
    }
  }
}

namespace {

inline void DisassembleCodeRange(Isolate* isolate, std::ostream& os, Code code,
                                 Address begin, size_t size,
                                 Address current_pc) {
  Address end = begin + size;
  AllowHandleAllocation allow_handles;
  DisallowHeapAllocation no_gc;
  HandleScope handle_scope(isolate);
  Disassembler::Decode(isolate, &os, reinterpret_cast<byte*>(begin),
                       reinterpret_cast<byte*>(end),
                       CodeReference(handle(code, isolate)), current_pc);
}

}  // namespace

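// Prints a human-readable dump of this Code object: the disassembled
// instructions plus all associated metadata (constant pool, source
// positions, deoptimization data, safepoints, handler table, relocation
// entries, unwinding info, and code comments).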
void Code::Disassemble(const char* name, std::ostream& os, Isolate* isolate,
                       Address current_pc) {
  os << "kind = " << CodeKindToString(kind()) << "\n";
  if (name == nullptr) {
    name = GetName(isolate);
  }
  if ((name != nullptr) && (name[0] != '\0')) {
    os << "name = " << name << "\n";
  }
  if (CodeKindIsOptimizedJSFunction(kind())) {
    os << "stack_slots = " << stack_slots() << "\n";
  }
  os << "compiler = " << (is_turbofanned() ? "turbofan" : "unknown") << "\n";
  os << "address = " << reinterpret_cast<void*>(ptr()) << "\n\n";

  if (is_off_heap_trampoline()) {
    int trampoline_size = raw_instruction_size();
    os << "Trampoline (size = " << trampoline_size << ")\n";
    DisassembleCodeRange(isolate, os, *this, raw_instruction_start(),
                         trampoline_size, current_pc);
    os << "\n";
  }

  {
    int code_size = InstructionSize();
    os << "Instructions (size = " << code_size << ")\n";
    DisassembleCodeRange(isolate, os, *this, InstructionStart(), code_size,
                         current_pc);

    if (int pool_size = constant_pool_size()) {
      DCHECK_EQ(pool_size & kPointerAlignmentMask, 0);
      os << "\nConstant Pool (size = " << pool_size << ")\n";
      Vector<char> buf = Vector<char>::New(50);
      intptr_t* ptr = reinterpret_cast<intptr_t*>(MetadataStart() +
                                                  constant_pool_offset());
      for (int i = 0; i < pool_size; i += kSystemPointerSize, ptr++) {
        SNPrintF(buf, "%4d %08" V8PRIxPTR, i, *ptr);
        os << static_cast<const void*>(ptr) << " " << buf.begin() << "\n";
      }
    }
  }
  os << "\n";

  {
    SourcePositionTableIterator it(
        SourcePositionTable(), SourcePositionTableIterator::kJavaScriptOnly);
    if (!it.done()) {
      os << "Source positions:\n pc offset position\n";
      for (; !it.done(); it.Advance()) {
        os << std::setw(10) << std::hex << it.code_offset() << std::dec
           << std::setw(10) << it.source_position().ScriptOffset()
           << (it.is_statement() ? " statement" : "") << "\n";
      }
      os << "\n";
    }
  }

  {
    SourcePositionTableIterator it(SourcePositionTable(),
                                   SourcePositionTableIterator::kExternalOnly);
    if (!it.done()) {
      os << "External Source positions:\n pc offset fileid line\n";
      for (; !it.done(); it.Advance()) {
        DCHECK(it.source_position().IsExternal());
        os << std::setw(10) << std::hex << it.code_offset() << std::dec
           << std::setw(10) << it.source_position().ExternalFileId()
           << std::setw(10) << it.source_position().ExternalLine() << "\n";
      }
      os << "\n";
    }
  }

  if (CodeKindCanDeoptimize(kind())) {
    DeoptimizationData data =
        DeoptimizationData::cast(this->deoptimization_data());
    data.DeoptimizationDataPrint(os);
  }
  os << "\n";

  if (has_safepoint_info()) {
    SafepointTable table(*this);
    os << "Safepoints (size = " << table.size() << ")\n";
    for (unsigned i = 0; i < table.length(); i++) {
      unsigned pc_offset = table.GetPcOffset(i);
      os << reinterpret_cast<const void*>(InstructionStart() + pc_offset)
         << " ";
      os << std::setw(6) << std::hex << pc_offset << " " << std::setw(4);
      int trampoline_pc = table.GetTrampolinePcOffset(i);
      print_pc(os, trampoline_pc);
      os << std::dec << " ";
      table.PrintEntry(i, os);
      os << " (sp -> fp) ";
      SafepointEntry entry = table.GetEntry(i);
      if (entry.has_deoptimization_index()) {
        os << std::setw(6) << entry.deoptimization_index();
      } else {
        os << "<none>";
      }
      os << "\n";
    }
    os << "\n";
  }

  if (has_handler_table()) {
    HandlerTable table(*this);
    os << "Handler Table (size = " << table.NumberOfReturnEntries() << ")\n";
    if (CodeKindIsOptimizedJSFunction(kind())) {
      table.HandlerTableReturnPrint(os);
    }
    os << "\n";
  }

  os << "RelocInfo (size = " << relocation_size() << ")\n";
  for (RelocIterator it(*this); !it.done(); it.next()) {
    it.rinfo()->Print(isolate, os);
  }
  os << "\n";

  if (has_unwinding_info()) {
    os << "UnwindingInfo (size = " << unwinding_info_size() << ")\n";
    EhFrameDisassembler eh_frame_disassembler(
        reinterpret_cast<byte*>(unwinding_info_start()),
        reinterpret_cast<byte*>(unwinding_info_end()));
    eh_frame_disassembler.DisassembleToStream(os);
    os << "\n";
  }

  if (has_code_comments()) {
    PrintCodeCommentsSection(os, code_comments(), code_comments_size());
  }
}
#endif  // ENABLE_DISASSEMBLER

void BytecodeArray::Disassemble(std::ostream& os) {
  DisallowHeapAllocation no_gc;

  os << "Parameter count " << parameter_count() << "\n";
  os << "Register count " << register_count() << "\n";
  os << "Frame size " << frame_size() << "\n";

  Address base_address = GetFirstBytecodeAddress();
  SourcePositionTableIterator source_positions(SourcePositionTable());

  // Storage for backing the handle passed to the iterator. This handle won't
  // be updated by the GC, but that's okay because we've disallowed GCs anyway.
  BytecodeArray handle_storage = *this;
  Handle<BytecodeArray> handle(reinterpret_cast<Address*>(&handle_storage));
  interpreter::BytecodeArrayIterator iterator(handle);
  while (!iterator.done()) {
    if (!source_positions.done() &&
        iterator.current_offset() == source_positions.code_offset()) {
      os << std::setw(5) << source_positions.source_position().ScriptOffset();
      os << (source_positions.is_statement() ? " S> " : " E> ");
      source_positions.Advance();
    } else {
      os << "         ";
    }
    Address current_address = base_address + iterator.current_offset();
    os << reinterpret_cast<const void*>(current_address) << " @ "
       << std::setw(4) << iterator.current_offset() << " : ";
    interpreter::BytecodeDecoder::Decode(
        os, reinterpret_cast<byte*>(current_address),
        static_cast<int>(parameter_count()));
    if (interpreter::Bytecodes::IsJump(iterator.current_bytecode())) {
      Address jump_target = base_address + iterator.GetJumpTargetOffset();
      os << " (" << reinterpret_cast<void*>(jump_target) << " @ "
         << iterator.GetJumpTargetOffset() << ")";
    }
    if (interpreter::Bytecodes::IsSwitch(iterator.current_bytecode())) {
      os << " {";
      bool first_entry = true;
      for (const auto& entry : iterator.GetJumpTableTargetOffsets()) {
        if (first_entry) {
          first_entry = false;
        } else {
          os << ",";
        }
        os << " " << entry.case_value << ": @" << entry.target_offset;
      }
      os << " }";
    }
    os << std::endl;
    iterator.Advance();
  }

  os << "Constant pool (size = " << constant_pool().length() << ")\n";
#ifdef OBJECT_PRINT
  if (constant_pool().length() > 0) {
    constant_pool().Print(os);
  }
#endif

  os << "Handler Table (size = " << handler_table().length() << ")\n";
#ifdef ENABLE_DISASSEMBLER
  if (handler_table().length() > 0) {
    HandlerTable table(*this);
    table.HandlerTableRangePrint(os);
  }
#endif

  ByteArray source_position_table = SourcePositionTable();
  os << "Source Position Table (size = " << source_position_table.length()
     << ")\n";
#ifdef OBJECT_PRINT
  if (source_position_table.length() > 0) {
    os << Brief(source_position_table) << std::endl;
  }
#endif
}

void BytecodeArray::CopyBytecodesTo(BytecodeArray to) {
  BytecodeArray from = *this;
  DCHECK_EQ(from.length(), to.length());
  CopyBytes(reinterpret_cast<byte*>(to.GetFirstBytecodeAddress()),
            reinterpret_cast<byte*>(from.GetFirstBytecodeAddress()),
            from.length());
}

void BytecodeArray::MakeOlder() {
  // The BytecodeArray is aged by the concurrent marker.
  // The age word must lie completely within the bytecode array.
  Address age_addr = address() + kBytecodeAgeOffset;
  DCHECK_LE(RoundDown(age_addr, kTaggedSize) + kTaggedSize, address() + Size());
  Age age = bytecode_age();
  if (age < kLastBytecodeAge) {
    base::AsAtomic8::Relaxed_CompareAndSwap(
        reinterpret_cast<base::Atomic8*>(age_addr), age, age + 1);
  }

  DCHECK_GE(bytecode_age(), kFirstBytecodeAge);
  DCHECK_LE(bytecode_age(), kLastBytecodeAge);
}

bool BytecodeArray::IsOld() const {
  return bytecode_age() >= kIsOldBytecodeAge;
}

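// DependentCode is a linked list of weak arrays with one node per dependency
// group, sorted by group. Each node holds the code objects that must be
// deoptimized when the group's assumption is broken; next_link points at the
// node for the next group.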
DependentCode DependentCode::GetDependentCode(Handle<HeapObject> object) {
  if (object->IsMap()) {
    return Handle<Map>::cast(object)->dependent_code();
  } else if (object->IsPropertyCell()) {
    return Handle<PropertyCell>::cast(object)->dependent_code();
  } else if (object->IsAllocationSite()) {
    return Handle<AllocationSite>::cast(object)->dependent_code();
  }
  UNREACHABLE();
}

void DependentCode::SetDependentCode(Handle<HeapObject> object,
                                     Handle<DependentCode> dep) {
  if (object->IsMap()) {
    Handle<Map>::cast(object)->set_dependent_code(*dep);
  } else if (object->IsPropertyCell()) {
    Handle<PropertyCell>::cast(object)->set_dependent_code(*dep);
  } else if (object->IsAllocationSite()) {
    Handle<AllocationSite>::cast(object)->set_dependent_code(*dep);
  } else {
    UNREACHABLE();
  }
}

void DependentCode::InstallDependency(Isolate* isolate,
                                      const MaybeObjectHandle& code,
                                      Handle<HeapObject> object,
                                      DependencyGroup group) {
  if (V8_UNLIKELY(FLAG_trace_code_dependencies)) {
    StdoutStream{} << "Installing dependency of [" << code->GetHeapObject()
                   << "] on [" << object << "] in group ["
                   << DependencyGroupName(group) << "]\n";
  }
  Handle<DependentCode> old_deps(DependentCode::GetDependentCode(object),
                                 isolate);
  Handle<DependentCode> new_deps =
      InsertWeakCode(isolate, old_deps, group, code);
  // Update the list head if necessary.
  if (!new_deps.is_identical_to(old_deps)) {
    DependentCode::SetDependentCode(object, new_deps);
  }
}

Handle<DependentCode> DependentCode::InsertWeakCode(
    Isolate* isolate, Handle<DependentCode> entries, DependencyGroup group,
    const MaybeObjectHandle& code) {
  if (entries->length() == 0 || entries->group() > group) {
    // There is no such group.
    return DependentCode::New(isolate, group, code, entries);
  }
  if (entries->group() < group) {
    // The group comes later in the list.
    Handle<DependentCode> old_next(entries->next_link(), isolate);
    Handle<DependentCode> new_next =
        InsertWeakCode(isolate, old_next, group, code);
    if (!old_next.is_identical_to(new_next)) {
      entries->set_next_link(*new_next);
    }
    return entries;
  }
  DCHECK_EQ(group, entries->group());
  int count = entries->count();
  // Check for existing entry to avoid duplicates.
  for (int i = 0; i < count; i++) {
    if (entries->object_at(i) == *code) return entries;
  }
  if (entries->length() < kCodesStartIndex + count + 1) {
    entries = EnsureSpace(isolate, entries);
    // Count could have changed, reload it.
    count = entries->count();
  }
  entries->set_object_at(count, *code);
  entries->set_count(count + 1);
  return entries;
}

Handle<DependentCode> DependentCode::New(Isolate* isolate,
                                         DependencyGroup group,
                                         const MaybeObjectHandle& object,
                                         Handle<DependentCode> next) {
  Handle<DependentCode> result =
      Handle<DependentCode>::cast(isolate->factory()->NewWeakFixedArray(
          kCodesStartIndex + 1, AllocationType::kOld));
  result->set_next_link(*next);
  result->set_flags(GroupField::encode(group) | CountField::encode(1));
  result->set_object_at(0, *object);
  return result;
}

Handle<DependentCode> DependentCode::EnsureSpace(
    Isolate* isolate, Handle<DependentCode> entries) {
  if (entries->Compact()) return entries;
  int capacity = kCodesStartIndex + DependentCode::Grow(entries->count());
  int grow_by = capacity - entries->length();
  return Handle<DependentCode>::cast(
      isolate->factory()->CopyWeakFixedArrayAndGrow(entries, grow_by));
}

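// Compacts the entry list in place by dropping cleared weak references.
// Returns true if any entry was removed, i.e. spare capacity now exists.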
bool DependentCode::Compact() {
  int old_count = count();
  int new_count = 0;
  for (int i = 0; i < old_count; i++) {
    MaybeObject obj = object_at(i);
    if (!obj->IsCleared()) {
      if (i != new_count) {
        copy(i, new_count);
      }
      new_count++;
    }
  }
  set_count(new_count);
  for (int i = new_count; i < old_count; i++) {
    clear_at(i);
  }
  return new_count < old_count;
}

bool DependentCode::MarkCodeForDeoptimization(
    DependentCode::DependencyGroup group) {
  if (this->length() == 0 || this->group() > group) {
    // There is no such group.
    return false;
  }
  if (this->group() < group) {
    // The group comes later in the list.
    return next_link().MarkCodeForDeoptimization(group);
  }
  DCHECK_EQ(group, this->group());
  DisallowHeapAllocation no_allocation_scope;
  // Mark all the code that needs to be deoptimized.
  bool marked = false;
  int count = this->count();
  for (int i = 0; i < count; i++) {
    MaybeObject obj = object_at(i);
    if (obj->IsCleared()) continue;
    Code code = Code::cast(obj->GetHeapObjectAssumeWeak());
    if (!code.marked_for_deoptimization()) {
      code.SetMarkedForDeoptimization(DependencyGroupName(group));
      marked = true;
    }
  }
  for (int i = 0; i < count; i++) {
    clear_at(i);
  }
  set_count(0);
  return marked;
}

void DependentCode::DeoptimizeDependentCodeGroup(
    DependentCode::DependencyGroup group) {
  DisallowHeapAllocation no_allocation_scope;
  bool marked = MarkCodeForDeoptimization(group);
  if (marked) {
    DCHECK(AllowCodeDependencyChange::IsAllowed());
    Deoptimizer::DeoptimizeMarkedCode(GetIsolateFromWritableObject(*this));
  }
}

void Code::SetMarkedForDeoptimization(const char* reason) {
  set_marked_for_deoptimization(true);
  Deoptimizer::TraceMarkForDeoptimization(*this, reason);
}

const char* DependentCode::DependencyGroupName(DependencyGroup group) {
  switch (group) {
    case kTransitionGroup:
      return "transition";
    case kPrototypeCheckGroup:
      return "prototype-check";
    case kPropertyCellChangedGroup:
      return "property-cell-changed";
    case kFieldConstGroup:
      return "field-const";
    case kFieldTypeGroup:
      return "field-type";
    case kFieldRepresentationGroup:
      return "field-representation";
    case kInitialMapChangedGroup:
      return "initial-map-changed";
    case kAllocationSiteTenuringChangedGroup:
      return "allocation-site-tenuring-changed";
    case kAllocationSiteTransitionChangedGroup:
      return "allocation-site-transition-changed";
  }
  UNREACHABLE();
}

}  // namespace internal
}  // namespace v8