// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/deoptimizer/deoptimizer.h"

#include "src/base/memory.h"
#include "src/codegen/interface-descriptors.h"
#include "src/codegen/register-configuration.h"
#include "src/codegen/reloc-info.h"
#include "src/deoptimizer/deoptimized-frame-info.h"
#include "src/deoptimizer/materialized-object-store.h"
#include "src/execution/frames-inl.h"
#include "src/execution/isolate.h"
#include "src/execution/pointer-authentication.h"
#include "src/execution/v8threads.h"
#include "src/handles/handles-inl.h"
#include "src/heap/heap-inl.h"
#include "src/logging/counters.h"
#include "src/logging/log.h"
#include "src/logging/runtime-call-stats-scope.h"
#include "src/objects/js-function-inl.h"
#include "src/objects/oddball.h"
#include "src/snapshot/embedded/embedded-data.h"

#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-linkage.h"
#endif  // V8_ENABLE_WEBASSEMBLY

namespace v8 {

using base::Memory;

namespace internal {
// {FrameWriter} offers a stack writer abstraction for writing
// FrameDescriptions. The main service the class provides is managing
// {top_offset_}, i.e. the offset of the next slot to write to.
//
// Note: Not in an anonymous namespace due to the friend class declaration
// in Deoptimizer.
class FrameWriter {
 public:
  static const int NO_INPUT_INDEX = -1;

  FrameWriter(Deoptimizer* deoptimizer, FrameDescription* frame,
              CodeTracer::Scope* trace_scope)
      : deoptimizer_(deoptimizer),
        frame_(frame),
        trace_scope_(trace_scope),
        top_offset_(frame->GetFrameSize()) {}

  void PushRawValue(intptr_t value, const char* debug_hint) {
    PushValue(value);
    if (trace_scope_ != nullptr) {
      DebugPrintOutputValue(value, debug_hint);
    }
  }

  void PushRawObject(Object obj, const char* debug_hint) {
    intptr_t value = obj.ptr();
    PushValue(value);
    if (trace_scope_ != nullptr) {
      DebugPrintOutputObject(obj, top_offset_, debug_hint);
    }
  }

  // There is no check against the allowed addresses for bottommost frames, as
  // the caller's pc could be anything. The caller's pc pushed here should never
  // be re-signed.
  void PushBottommostCallerPc(intptr_t pc) {
    top_offset_ -= kPCOnStackSize;
    frame_->SetFrameSlot(top_offset_, pc);
    DebugPrintOutputPc(pc, "bottommost caller's pc\n");
  }

  void PushApprovedCallerPc(intptr_t pc) {
    top_offset_ -= kPCOnStackSize;
    frame_->SetCallerPc(top_offset_, pc);
    DebugPrintOutputPc(pc, "caller's pc\n");
  }

  void PushCallerFp(intptr_t fp) {
    top_offset_ -= kFPOnStackSize;
    frame_->SetCallerFp(top_offset_, fp);
    DebugPrintOutputValue(fp, "caller's fp\n");
  }

  void PushCallerConstantPool(intptr_t cp) {
    top_offset_ -= kSystemPointerSize;
    frame_->SetCallerConstantPool(top_offset_, cp);
    DebugPrintOutputValue(cp, "caller's constant_pool\n");
  }

  void PushTranslatedValue(const TranslatedFrame::iterator& iterator,
                           const char* debug_hint = "") {
    Object obj = iterator->GetRawValue();
    PushRawObject(obj, debug_hint);
    if (trace_scope_ != nullptr) {
      PrintF(trace_scope_->file(), " (input #%d)\n", iterator.input_index());
    }
    deoptimizer_->QueueValueForMaterialization(output_address(top_offset_), obj,
                                               iterator);
  }

  void PushStackJSArguments(TranslatedFrame::iterator& iterator,
                            int parameters_count) {
    std::vector<TranslatedFrame::iterator> parameters;
    parameters.reserve(parameters_count);
    for (int i = 0; i < parameters_count; ++i, ++iterator) {
      parameters.push_back(iterator);
    }
    for (auto& parameter : base::Reversed(parameters)) {
      PushTranslatedValue(parameter, "stack parameter");
    }
  }

  unsigned top_offset() const { return top_offset_; }

  FrameDescription* frame() { return frame_; }

 private:
  void PushValue(intptr_t value) {
    CHECK_GE(top_offset_, 0);
    top_offset_ -= kSystemPointerSize;
    frame_->SetFrameSlot(top_offset_, value);
  }

  Address output_address(unsigned output_offset) {
    Address output_address =
        static_cast<Address>(frame_->GetTop()) + output_offset;
    return output_address;
  }

  void DebugPrintOutputValue(intptr_t value, const char* debug_hint = "") {
    if (trace_scope_ != nullptr) {
      PrintF(trace_scope_->file(),
             "    " V8PRIxPTR_FMT ": [top + %3d] <- " V8PRIxPTR_FMT " ;  %s",
             output_address(top_offset_), top_offset_, value, debug_hint);
    }
  }

  void DebugPrintOutputPc(intptr_t value, const char* debug_hint = "") {
#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
    if (trace_scope_ != nullptr) {
      PrintF(trace_scope_->file(),
             "    " V8PRIxPTR_FMT ": [top + %3d] <- " V8PRIxPTR_FMT
             " (signed) " V8PRIxPTR_FMT " (unsigned) ;  %s",
             output_address(top_offset_), top_offset_, value,
             PointerAuthentication::StripPAC(value), debug_hint);
    }
#else
    DebugPrintOutputValue(value, debug_hint);
#endif
  }

  void DebugPrintOutputObject(Object obj, unsigned output_offset,
                              const char* debug_hint = "") {
    if (trace_scope_ != nullptr) {
      PrintF(trace_scope_->file(), "    " V8PRIxPTR_FMT ": [top + %3d] <- ",
             output_address(output_offset), output_offset);
      if (obj.IsSmi()) {
        PrintF(trace_scope_->file(), V8PRIxPTR_FMT " <Smi %d>", obj.ptr(),
               Smi::cast(obj).value());
      } else {
        obj.ShortPrint(trace_scope_->file());
      }
      PrintF(trace_scope_->file(), " ;  %s", debug_hint);
    }
  }

  Deoptimizer* deoptimizer_;
  FrameDescription* frame_;
  CodeTracer::Scope* const trace_scope_;
  unsigned top_offset_;
};

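// Illustrative sketch of how {FrameWriter} is used further down in this file
// (names are illustrative, not real locals): slots are pushed from the high
// offsets down towards offset 0, and a fully written frame must end with
// {top_offset_} == 0:
//
//   FrameWriter writer(this, output_frame, verbose_trace_scope());
//   writer.PushApprovedCallerPc(previous_frame_pc);
//   writer.PushCallerFp(previous_frame_fp);
//   ...                                  // frame-type specific slots
//   CHECK_EQ(0u, writer.top_offset());   // frame must be filled exactly
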
Code Deoptimizer::FindDeoptimizingCode(Address addr) {
  if (function_.IsHeapObject()) {
    // Search all deoptimizing code in the native context of the function.
    Isolate* isolate = isolate_;
    NativeContext native_context = function_.native_context();
    Object element = native_context.DeoptimizedCodeListHead();
    while (!element.IsUndefined(isolate)) {
      Code code = FromCodeT(CodeT::cast(element));
      CHECK(CodeKindCanDeoptimize(code.kind()));
      if (code.contains(isolate, addr)) return code;
      element = code.next_code_link();
    }
  }
  return Code();
}

// We rely on this function not causing a GC. It is called from generated code
// without having a real stack frame in place.
Deoptimizer* Deoptimizer::New(Address raw_function, DeoptimizeKind kind,
                              Address from, int fp_to_sp_delta,
                              Isolate* isolate) {
  JSFunction function = JSFunction::cast(Object(raw_function));
  Deoptimizer* deoptimizer =
      new Deoptimizer(isolate, function, kind, from, fp_to_sp_delta);
  isolate->set_current_deoptimizer(deoptimizer);
  return deoptimizer;
}

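// Retrieves the deoptimizer that New() stashed on the isolate and frees its
// frame descriptions; the object itself is deleted by the caller.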
Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
  Deoptimizer* result = isolate->GetAndClearCurrentDeoptimizer();
  result->DeleteFrameDescriptions();
  return result;
}

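// Translates the given optimized frame and returns a heap-allocated
// DeoptimizedFrameInfo for the inlined frame with index {jsframe_index}, for
// use by the debugger. The caller owns the returned object.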
DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
    JavaScriptFrame* frame, int jsframe_index, Isolate* isolate) {
  CHECK(frame->is_optimized());

  TranslatedState translated_values(frame);
  translated_values.Prepare(frame->fp());

  TranslatedState::iterator frame_it = translated_values.end();
  int counter = jsframe_index;
  for (auto it = translated_values.begin(); it != translated_values.end();
       it++) {
    if (it->kind() == TranslatedFrame::kUnoptimizedFunction ||
        it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
        it->kind() ==
            TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
      if (counter == 0) {
        frame_it = it;
        break;
      }
      counter--;
    }
  }
  CHECK(frame_it != translated_values.end());
  // We only include kJavaScriptBuiltinContinuation frames above to get the
  // counting right.
  CHECK_EQ(frame_it->kind(), TranslatedFrame::kUnoptimizedFunction);

  DeoptimizedFrameInfo* info =
      new DeoptimizedFrameInfo(&translated_values, frame_it, isolate);

  return info;
}

namespace {
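// A thread visitor that patches the pc of live activations of code marked
// for deoptimization, so that execution resumes at the deoptimization
// trampoline instead of the deoptimized code.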
class ActivationsFinder : public ThreadVisitor {
 public:
  explicit ActivationsFinder(std::set<Code>* codes, Code topmost_optimized_code,
                             bool safe_to_deopt_topmost_optimized_code)
      : codes_(codes) {
#ifdef DEBUG
    topmost_ = topmost_optimized_code;
    safe_to_deopt_ = safe_to_deopt_topmost_optimized_code;
#endif
  }

  // Find the frames with activations of codes marked for deoptimization,
  // search for the trampoline to the deoptimizer call corresponding to each
  // code, and use it to replace the current pc on the stack.
  void VisitThread(Isolate* isolate, ThreadLocalTop* top) override {
    for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
      if (it.frame()->type() == StackFrame::OPTIMIZED) {
        Code code = it.frame()->LookupCode();
        if (CodeKindCanDeoptimize(code.kind()) &&
            code.marked_for_deoptimization()) {
          codes_->erase(code);
          // Obtain the trampoline to the deoptimizer call.
          SafepointEntry safepoint =
              code.GetSafepointEntry(isolate, it.frame()->pc());
          int trampoline_pc = safepoint.trampoline_pc();
          DCHECK_IMPLIES(code == topmost_, safe_to_deopt_);
          STATIC_ASSERT(SafepointEntry::kNoTrampolinePC == -1);
          CHECK_GE(trampoline_pc, 0);
          // Replace the current pc on the stack with the trampoline.
          // TODO(v8:10026): avoid replacing a signed pointer.
          Address* pc_addr = it.frame()->pc_address();
          Address new_pc = code.raw_instruction_start() + trampoline_pc;
          PointerAuthentication::ReplacePC(pc_addr, new_pc, kSystemPointerSize);
        }
      }
    }
  }

 private:
  std::set<Code>* codes_;

#ifdef DEBUG
  Code topmost_;
  bool safe_to_deopt_;
#endif
};
}  // namespace

// Move marked code from the optimized code list to the deoptimized code list,
// and replace pc on the stack for codes marked for deoptimization.
// static
void Deoptimizer::DeoptimizeMarkedCodeForContext(NativeContext native_context) {
  DisallowGarbageCollection no_gc;

  Isolate* isolate = native_context.GetIsolate();
  Code topmost_optimized_code;
  bool safe_to_deopt_topmost_optimized_code = false;
#ifdef DEBUG
  // Make sure all activations of optimized code can deopt at their current PC.
  // The topmost optimized code has special handling because it cannot be
  // deoptimized due to weak object dependency.
  for (StackFrameIterator it(isolate, isolate->thread_local_top()); !it.done();
       it.Advance()) {
    StackFrame::Type type = it.frame()->type();
    if (type == StackFrame::OPTIMIZED) {
      Code code = it.frame()->LookupCode();
      JSFunction function =
          static_cast<OptimizedFrame*>(it.frame())->function();
      TraceFoundActivation(isolate, function);
      SafepointEntry safepoint =
          code.GetSafepointEntry(isolate, it.frame()->pc());

      // Turbofan deopt is checked when we are patching addresses on stack.
      bool safe_if_deopt_triggered = safepoint.has_deoptimization_index();
      bool is_builtin_code = code.kind() == CodeKind::BUILTIN;
      DCHECK(topmost_optimized_code.is_null() || safe_if_deopt_triggered ||
             is_builtin_code);
      if (topmost_optimized_code.is_null()) {
        topmost_optimized_code = code;
        safe_to_deopt_topmost_optimized_code = safe_if_deopt_triggered;
      }
    }
  }
#endif

  // We will use this set to mark those Code objects that are marked for
  // deoptimization and have not been found in stack frames.
  std::set<Code> codes;

  // Move marked code from the optimized code list to the deoptimized code
  // list. Walk over all optimized code objects in this native context.
  Code prev;
  Object element = native_context.OptimizedCodeListHead();
  while (!element.IsUndefined(isolate)) {
    Code code = FromCodeT(CodeT::cast(element));
    CHECK(CodeKindCanDeoptimize(code.kind()));
    Object next = code.next_code_link();

    if (code.marked_for_deoptimization()) {
      codes.insert(code);

      if (!prev.is_null()) {
        // Skip this code in the optimized code list.
        prev.set_next_code_link(next);
      } else {
        // There was no previous node, the next node is the new head.
        native_context.SetOptimizedCodeListHead(next);
      }

      // Move the code to the _deoptimized_ code list.
      code.set_next_code_link(native_context.DeoptimizedCodeListHead());
      native_context.SetDeoptimizedCodeListHead(ToCodeT(code));
    } else {
      // Not marked; preserve this element.
      prev = code;
    }
    element = next;
  }

  ActivationsFinder visitor(&codes, topmost_optimized_code,
                            safe_to_deopt_topmost_optimized_code);
  // Iterate over the stack of this thread.
  visitor.VisitThread(isolate, isolate->thread_local_top());
  // In addition to iterating over the stack of this thread, we also
  // need to consider all the other threads, as they may also use
  // the code currently being deoptimized.
  isolate->thread_manager()->IterateArchivedThreads(&visitor);

  // If there's no activation of a code in any stack then we can remove its
  // deoptimization data. We do this to ensure that code objects that are
  // unlinked don't transitively keep objects alive unnecessarily.
  for (Code code : codes) {
    isolate->heap()->InvalidateCodeDeoptimizationData(code);
  }

  native_context.osr_code_cache().EvictDeoptimizedCode(isolate);
}

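// Marks all optimized code in every native context for deoptimization,
// clears the OSR code caches, and then deoptimizes everything that was
// marked.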
void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
  RCS_SCOPE(isolate, RuntimeCallCounterId::kDeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  TraceDeoptAll(isolate);
  isolate->AbortConcurrentOptimization(BlockingBehavior::kBlock);
  DisallowGarbageCollection no_gc;
  // For all contexts, mark all code, then deoptimize.
  Object context = isolate->heap()->native_contexts_list();
  while (!context.IsUndefined(isolate)) {
    NativeContext native_context = NativeContext::cast(context);
    MarkAllCodeForContext(native_context);
    OSROptimizedCodeCache::Clear(isolate, native_context);
    DeoptimizeMarkedCodeForContext(native_context);
    context = native_context.next_context_link();
  }
}

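// Deoptimizes only the code that has already been marked for deoptimization,
// in every native context.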
void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
  RCS_SCOPE(isolate, RuntimeCallCounterId::kDeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  TraceDeoptMarked(isolate);
  DisallowGarbageCollection no_gc;
  // For all contexts, deoptimize code already marked.
  Object context = isolate->heap()->native_contexts_list();
  while (!context.IsUndefined(isolate)) {
    NativeContext native_context = NativeContext::cast(context);
    DeoptimizeMarkedCodeForContext(native_context);
    context = native_context.next_context_link();
  }
}

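// Walks the optimized code list of {native_context} and marks every entry
// for deoptimization; the actual deoptimization happens later in
// DeoptimizeMarkedCodeForContext().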
void Deoptimizer::MarkAllCodeForContext(NativeContext native_context) {
  Object element = native_context.OptimizedCodeListHead();
  Isolate* isolate = native_context.GetIsolate();
  while (!element.IsUndefined(isolate)) {
    Code code = FromCodeT(CodeT::cast(element));
    CHECK(CodeKindCanDeoptimize(code.kind()));
    code.set_marked_for_deoptimization(true);
    element = code.next_code_link();
  }
}

void Deoptimizer::DeoptimizeFunction(JSFunction function, Code code) {
  Isolate* isolate = function.GetIsolate();
  RCS_SCOPE(isolate, RuntimeCallCounterId::kDeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  function.ResetIfCodeFlushed();
  if (code.is_null()) code = FromCodeT(function.code());

  if (CodeKindCanDeoptimize(code.kind())) {
    // Mark the code for deoptimization and unlink any functions that also
    // refer to that code. The code cannot be shared across native contexts,
    // so we only need to search one.
    code.set_marked_for_deoptimization(true);
    // The code in the function's optimized code feedback vector slot might
    // be different from the code on the function - evict it if necessary.
    function.feedback_vector().EvictOptimizedCodeMarkedForDeoptimization(
        function.shared(), "unlinking code marked for deopt");
    DeoptimizeMarkedCodeForContext(function.native_context());
    // TODO(mythria): Ideally EvictMarkCode should compact the cache without
    // having to explicitly call this. We don't do this currently because
    // compacting causes GC and DeoptimizeMarkedCodeForContext uses raw
    // pointers. Update DeoptimizeMarkedCodeForContext to use handles and
    // remove this call from here.
    OSROptimizedCodeCache::Compact(
        isolate, Handle<NativeContext>(function.native_context(), isolate));
  }
}

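// Static trampoline to DoComputeOutputFrames(); being a plain function, it
// can be reached from generated code through an external reference.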
void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
  deoptimizer->DoComputeOutputFrames();
}

const char* Deoptimizer::MessageFor(DeoptimizeKind kind) {
  switch (kind) {
    case DeoptimizeKind::kEager:
      return "deopt-eager";
    case DeoptimizeKind::kUnused:
      return "deopt-unused";
    case DeoptimizeKind::kLazy:
      return "deopt-lazy";
  }
}

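// The constructor captures the deoptimization parameters, allocates the
// input FrameDescription, and computes {deopt_exit_index_} from the return
// address {from}: eager deopt exits are laid out before lazy ones, so the
// exit region containing {from} determines which exit size to divide the
// offset by.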
Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction function,
                         DeoptimizeKind kind, Address from, int fp_to_sp_delta)
    : isolate_(isolate),
      function_(function),
      deopt_exit_index_(kFixedExitSizeMarker),
      deopt_kind_(kind),
      from_(from),
      fp_to_sp_delta_(fp_to_sp_delta),
      deoptimizing_throw_(false),
      catch_handler_data_(-1),
      catch_handler_pc_offset_(-1),
      input_(nullptr),
      output_count_(0),
      output_(nullptr),
      caller_frame_top_(0),
      caller_fp_(0),
      caller_pc_(0),
      caller_constant_pool_(0),
      actual_argument_count_(0),
      stack_fp_(0),
      trace_scope_(FLAG_trace_deopt || FLAG_log_deopt
                       ? new CodeTracer::Scope(isolate->GetCodeTracer())
                       : nullptr) {
  if (isolate->deoptimizer_lazy_throw()) {
    isolate->set_deoptimizer_lazy_throw(false);
    deoptimizing_throw_ = true;
  }

  DCHECK_NE(from, kNullAddress);
  compiled_code_ = FindOptimizedCode();
  DCHECK(!compiled_code_.is_null());

  DCHECK(function.IsJSFunction());
#ifdef DEBUG
  DCHECK(AllowGarbageCollection::IsAllowed());
  disallow_garbage_collection_ = new DisallowGarbageCollection();
#endif  // DEBUG
  CHECK(CodeKindCanDeoptimize(compiled_code_.kind()));
  {
    HandleScope scope(isolate_);
    PROFILE(isolate_, CodeDeoptEvent(handle(compiled_code_, isolate_), kind,
                                     from_, fp_to_sp_delta_));
  }
  unsigned size = ComputeInputFrameSize();
  const int parameter_count =
      function.shared().internal_formal_parameter_count_with_receiver();
  input_ = new (size) FrameDescription(size, parameter_count);

  DCHECK_EQ(deopt_exit_index_, kFixedExitSizeMarker);
  // Calculate the deopt exit index from return address.
  DCHECK_GT(kEagerDeoptExitSize, 0);
  DCHECK_GT(kLazyDeoptExitSize, 0);
  DeoptimizationData deopt_data =
      DeoptimizationData::cast(compiled_code_.deoptimization_data());
  Address deopt_start = compiled_code_.raw_instruction_start() +
                        deopt_data.DeoptExitStart().value();
  int eager_deopt_count = deopt_data.EagerDeoptCount().value();
  Address lazy_deopt_start =
      deopt_start + eager_deopt_count * kEagerDeoptExitSize;
  // The deoptimization exits are sorted so that lazy deopt exits appear after
  // eager deopts.
  static_assert(static_cast<int>(DeoptimizeKind::kLazy) ==
                    static_cast<int>(kLastDeoptimizeKind),
                "lazy deopts are expected to be emitted last");
  // from_ is the value of the link register after the call to the
  // deoptimizer, so for the last lazy deopt, from_ points to the first
  // non-lazy deopt, so we use <=, similarly for the last non-lazy deopt and
  // the first deopt with resume entry.
  if (from_ <= lazy_deopt_start) {
    int offset = static_cast<int>(from_ - kEagerDeoptExitSize - deopt_start);
    DCHECK_EQ(0, offset % kEagerDeoptExitSize);
    deopt_exit_index_ = offset / kEagerDeoptExitSize;
  } else {
    int offset =
        static_cast<int>(from_ - kLazyDeoptExitSize - lazy_deopt_start);
    DCHECK_EQ(0, offset % kLazyDeoptExitSize);
    deopt_exit_index_ = eager_deopt_count + (offset / kLazyDeoptExitSize);
  }
}

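// Returns the Code object the deopt exit {from_} belongs to: first the
// deoptimized code list of the function's native context is searched, then
// the isolate-wide code lookup is used as a fallback.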
Code Deoptimizer::FindOptimizedCode() {
  Code compiled_code = FindDeoptimizingCode(from_);
  return !compiled_code.is_null() ? compiled_code
                                  : isolate_->FindCodeObject(from_);
}

Handle<JSFunction> Deoptimizer::function() const {
  return Handle<JSFunction>(function_, isolate());
}

Handle<Code> Deoptimizer::compiled_code() const {
  return Handle<Code>(compiled_code_, isolate());
}

Deoptimizer::~Deoptimizer() {
  DCHECK(input_ == nullptr && output_ == nullptr);
  DCHECK_NULL(disallow_garbage_collection_);
  delete trace_scope_;
}

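// Frees the input and output FrameDescriptions. Note that an output frame
// may alias the input frame, so the input is only deleted once.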
void Deoptimizer::DeleteFrameDescriptions() {
  delete input_;
  for (int i = 0; i < output_count_; ++i) {
    if (output_[i] != input_) delete output_[i];
  }
  delete[] output_;
  input_ = nullptr;
  output_ = nullptr;
#ifdef DEBUG
  DCHECK(!AllowGarbageCollection::IsAllowed());
  DCHECK_NOT_NULL(disallow_garbage_collection_);
  delete disallow_garbage_collection_;
  disallow_garbage_collection_ = nullptr;
#endif  // DEBUG
}

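// Maps a deoptimization kind to the builtin implementing its entry point.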
Builtin Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind kind) {
  switch (kind) {
    case DeoptimizeKind::kEager:
      return Builtin::kDeoptimizationEntry_Eager;
    case DeoptimizeKind::kUnused:
      return Builtin::kDeoptimizationEntry_Unused;
    case DeoptimizeKind::kLazy:
      return Builtin::kDeoptimizationEntry_Lazy;
  }
}

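// Returns true if {addr} lies within one of the deoptimization entry
// builtins, and stores the corresponding kind in {type_out}.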
bool Deoptimizer::IsDeoptimizationEntry(Isolate* isolate, Address addr,
                                        DeoptimizeKind* type_out) {
  Builtin builtin = OffHeapInstructionStream::TryLookupCode(isolate, addr);
  if (!Builtins::IsBuiltinId(builtin)) return false;

  switch (builtin) {
    case Builtin::kDeoptimizationEntry_Eager:
      *type_out = DeoptimizeKind::kEager;
      return true;
    case Builtin::kDeoptimizationEntry_Unused:
      *type_out = DeoptimizeKind::kUnused;
      return true;
    case Builtin::kDeoptimizationEntry_Lazy:
      *type_out = DeoptimizeKind::kLazy;
      return true;
    default:
      return false;
  }

  UNREACHABLE();
}

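// Counts the entries on the deoptimized code lists of all native contexts
// that are not themselves marked for deoptimization.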
int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
  int length = 0;
  // Count all entries in the deoptimizing code list of every context.
  Object context = isolate->heap()->native_contexts_list();
  while (!context.IsUndefined(isolate)) {
    NativeContext native_context = NativeContext::cast(context);
    Object element = native_context.DeoptimizedCodeListHead();
    while (!element.IsUndefined(isolate)) {
      Code code = FromCodeT(CodeT::cast(element));
      DCHECK(CodeKindCanDeoptimize(code.kind()));
      if (!code.marked_for_deoptimization()) {
        length++;
      }
      element = code.next_code_link();
    }
    context = Context::cast(context).next_context_link();
  }
  return length;
}

namespace {

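// Returns the pc offset of the catch handler covering the frame's bytecode
// offset (with extra handler data stored in {data_out}), 0 for catching
// builtin continuation frames, or -1 if the frame cannot catch.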
int LookupCatchHandler(Isolate* isolate, TranslatedFrame* translated_frame,
                       int* data_out) {
  switch (translated_frame->kind()) {
    case TranslatedFrame::kUnoptimizedFunction: {
      int bytecode_offset = translated_frame->bytecode_offset().ToInt();
      HandlerTable table(
          translated_frame->raw_shared_info().GetBytecodeArray(isolate));
      return table.LookupRange(bytecode_offset, data_out, nullptr);
    }
    case TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch: {
      return 0;
    }
    default:
      break;
  }
  return -1;
}

}  // namespace

void Deoptimizer::TraceDeoptBegin(int optimization_id,
                                  BytecodeOffset bytecode_offset) {
  DCHECK(tracing_enabled());
  FILE* file = trace_scope()->file();
  Deoptimizer::DeoptInfo info =
      Deoptimizer::GetDeoptInfo(compiled_code_, from_);
  PrintF(file, "[bailout (kind: %s, reason: %s): begin. deoptimizing ",
         MessageFor(deopt_kind_), DeoptimizeReasonToString(info.deopt_reason));
  if (function_.IsJSFunction()) {
    function_.ShortPrint(file);
  } else {
    PrintF(file, "%s", CodeKindToString(compiled_code_.kind()));
  }
  PrintF(file,
         ", opt id %d, "
#ifdef DEBUG
         "node id %d, "
#endif  // DEBUG
         "bytecode offset %d, deopt exit %d, FP to SP "
         "delta %d, "
         "caller SP " V8PRIxPTR_FMT ", pc " V8PRIxPTR_FMT "]\n",
         optimization_id,
#ifdef DEBUG
         info.node_id,
#endif  // DEBUG
         bytecode_offset.ToInt(), deopt_exit_index_, fp_to_sp_delta_,
         caller_frame_top_, PointerAuthentication::StripPAC(from_));
  if (verbose_tracing_enabled() && deopt_kind_ != DeoptimizeKind::kLazy) {
    PrintF(file, "            ;;; deoptimize at ");
    OFStream outstr(file);
    info.position.Print(outstr, compiled_code_);
    PrintF(file, "\n");
  }
}

void Deoptimizer::TraceDeoptEnd(double deopt_duration) {
  DCHECK(verbose_tracing_enabled());
  PrintF(trace_scope()->file(), "[bailout end. took %0.3f ms]\n",
         deopt_duration);
}

// static
void Deoptimizer::TraceMarkForDeoptimization(Code code, const char* reason) {
  if (!FLAG_trace_deopt && !FLAG_log_deopt) return;

  DisallowGarbageCollection no_gc;
  Isolate* isolate = code.GetIsolate();
  Object maybe_data = code.deoptimization_data();
  if (maybe_data == ReadOnlyRoots(isolate).empty_fixed_array()) return;

  DeoptimizationData deopt_data = DeoptimizationData::cast(maybe_data);
  CodeTracer::Scope scope(isolate->GetCodeTracer());
  if (FLAG_trace_deopt) {
    PrintF(scope.file(), "[marking dependent code " V8PRIxPTR_FMT " (",
           code.ptr());
    deopt_data.SharedFunctionInfo().ShortPrint(scope.file());
    PrintF(") (opt id %d) for deoptimization, reason: %s]\n",
           deopt_data.OptimizationId().value(), reason);
  }
  if (!FLAG_log_deopt) return;
  no_gc.Release();
  {
    HandleScope handle_scope(isolate);
    PROFILE(
        isolate,
        CodeDependencyChangeEvent(
            handle(code, isolate),
            handle(SharedFunctionInfo::cast(deopt_data.SharedFunctionInfo()),
                   isolate),
            reason));
  }
}

// static
void Deoptimizer::TraceEvictFromOptimizedCodeCache(SharedFunctionInfo sfi,
                                                   const char* reason) {
  if (!FLAG_trace_deopt_verbose) return;

  DisallowGarbageCollection no_gc;
  CodeTracer::Scope scope(sfi.GetIsolate()->GetCodeTracer());
  PrintF(scope.file(),
         "[evicting optimized code marked for deoptimization (%s) for ",
         reason);
  sfi.ShortPrint(scope.file());
  PrintF(scope.file(), "]\n");
}

#ifdef DEBUG
// static
void Deoptimizer::TraceFoundActivation(Isolate* isolate, JSFunction function) {
  if (!FLAG_trace_deopt_verbose) return;
  CodeTracer::Scope scope(isolate->GetCodeTracer());
  PrintF(scope.file(), "[deoptimizer found activation of function: ");
  function.PrintName(scope.file());
  PrintF(scope.file(), " / %" V8PRIxPTR "]\n", function.ptr());
}
#endif  // DEBUG

// static
void Deoptimizer::TraceDeoptAll(Isolate* isolate) {
  if (!FLAG_trace_deopt_verbose) return;
  CodeTracer::Scope scope(isolate->GetCodeTracer());
  PrintF(scope.file(), "[deoptimize all code in all contexts]\n");
}

// static
void Deoptimizer::TraceDeoptMarked(Isolate* isolate) {
  if (!FLAG_trace_deopt_verbose) return;
  CodeTracer::Scope scope(isolate->GetCodeTracer());
  PrintF(scope.file(), "[deoptimize marked code in all contexts]\n");
}

// We rely on this function not causing a GC.  It is called from generated code
// without having a real stack frame in place.
void Deoptimizer::DoComputeOutputFrames() {
  // When we call this function, the return address of the previous frame has
  // been removed from the stack by the DeoptimizationEntry builtin, so the
  // stack is not iterable by the SafeStackFrameIterator.
#if V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK
  DCHECK_EQ(0, isolate()->isolate_data()->stack_is_iterable());
#endif
  base::ElapsedTimer timer;

  // Determine basic deoptimization information.  The optimized frame is
  // described by the input data.
  DeoptimizationData input_data =
      DeoptimizationData::cast(compiled_code_.deoptimization_data());

  {
    // Read caller's PC, caller's FP and caller's constant pool values
    // from input frame. Compute caller's frame top address.

    Register fp_reg = JavaScriptFrame::fp_register();
    stack_fp_ = input_->GetRegister(fp_reg.code());

    caller_frame_top_ = stack_fp_ + ComputeInputFrameAboveFpFixedSize();

    Address fp_address = input_->GetFramePointerAddress();
    caller_fp_ = Memory<intptr_t>(fp_address);
    caller_pc_ =
        Memory<intptr_t>(fp_address + CommonFrameConstants::kCallerPCOffset);
    actual_argument_count_ = static_cast<int>(
        Memory<intptr_t>(fp_address + StandardFrameConstants::kArgCOffset));

    if (FLAG_enable_embedded_constant_pool) {
      caller_constant_pool_ = Memory<intptr_t>(
          fp_address + CommonFrameConstants::kConstantPoolOffset);
    }
  }

  StackGuard* const stack_guard = isolate()->stack_guard();
  CHECK_GT(static_cast<uintptr_t>(caller_frame_top_),
           stack_guard->real_jslimit());

  BytecodeOffset bytecode_offset =
      input_data.GetBytecodeOffset(deopt_exit_index_);
  ByteArray translations = input_data.TranslationByteArray();
  unsigned translation_index =
      input_data.TranslationIndex(deopt_exit_index_).value();

  if (tracing_enabled()) {
    timer.Start();
    TraceDeoptBegin(input_data.OptimizationId().value(), bytecode_offset);
  }

  FILE* trace_file =
      verbose_tracing_enabled() ? trace_scope()->file() : nullptr;
  TranslationArrayIterator state_iterator(translations, translation_index);
  translated_state_.Init(
      isolate_, input_->GetFramePointerAddress(), stack_fp_, &state_iterator,
      input_data.LiteralArray(), input_->GetRegisterValues(), trace_file,
      function_.IsHeapObject()
          ? function_.shared()
                .internal_formal_parameter_count_without_receiver()
          : 0,
      actual_argument_count_ - kJSArgcReceiverSlots);

  // Do the input frame to output frame(s) translation.
  size_t count = translated_state_.frames().size();
  // If we are supposed to go to the catch handler, find the catching frame
  // for the catch and make sure we only deoptimize up to that frame.
  if (deoptimizing_throw_) {
    size_t catch_handler_frame_index = count;
    for (size_t i = count; i-- > 0;) {
      catch_handler_pc_offset_ = LookupCatchHandler(
          isolate(), &(translated_state_.frames()[i]), &catch_handler_data_);
      if (catch_handler_pc_offset_ >= 0) {
        catch_handler_frame_index = i;
        break;
      }
    }
    CHECK_LT(catch_handler_frame_index, count);
    count = catch_handler_frame_index + 1;
  }

  DCHECK_NULL(output_);
  output_ = new FrameDescription*[count];
  for (size_t i = 0; i < count; ++i) {
    output_[i] = nullptr;
  }
  output_count_ = static_cast<int>(count);

  // Translate each output frame.
  int frame_index = 0;
  size_t total_output_frame_size = 0;
  for (size_t i = 0; i < count; ++i, ++frame_index) {
    TranslatedFrame* translated_frame = &(translated_state_.frames()[i]);
    const bool handle_exception = deoptimizing_throw_ && i == count - 1;
    switch (translated_frame->kind()) {
      case TranslatedFrame::kUnoptimizedFunction:
        DoComputeUnoptimizedFrame(translated_frame, frame_index,
                                  handle_exception);
        break;
      case TranslatedFrame::kArgumentsAdaptor:
        DoComputeArgumentsAdaptorFrame(translated_frame, frame_index);
        break;
      case TranslatedFrame::kConstructStub:
        DoComputeConstructStubFrame(translated_frame, frame_index);
        break;
      case TranslatedFrame::kBuiltinContinuation:
#if V8_ENABLE_WEBASSEMBLY
      case TranslatedFrame::kJSToWasmBuiltinContinuation:
#endif  // V8_ENABLE_WEBASSEMBLY
        DoComputeBuiltinContinuation(translated_frame, frame_index,
                                     BuiltinContinuationMode::STUB);
        break;
      case TranslatedFrame::kJavaScriptBuiltinContinuation:
        DoComputeBuiltinContinuation(translated_frame, frame_index,
                                     BuiltinContinuationMode::JAVASCRIPT);
        break;
      case TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch:
        DoComputeBuiltinContinuation(
            translated_frame, frame_index,
            handle_exception
                ? BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION
                : BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH);
        break;
      case TranslatedFrame::kInvalid:
        FATAL("invalid frame");
    }
    total_output_frame_size += output_[frame_index]->GetFrameSize();
  }

  FrameDescription* topmost = output_[count - 1];
  topmost->GetRegisterValues()->SetRegister(kRootRegister.code(),
                                            isolate()->isolate_root());
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
  topmost->GetRegisterValues()->SetRegister(kPtrComprCageBaseRegister.code(),
                                            isolate()->cage_base());
#endif

  // Print some helpful diagnostic information.
  if (verbose_tracing_enabled()) {
    TraceDeoptEnd(timer.Elapsed().InMillisecondsF());
  }

  // The following invariant is fairly tricky to guarantee, since the size of
  // an optimized frame and its deoptimized counterparts usually differs. We
  // thus need to consider the case in which deoptimized frames are larger than
  // the optimized frame in stack checks in optimized code. We do this by
  // applying an offset to stack checks (see kArchStackPointerGreaterThan in the
  // code generator).
  // Note that we explicitly allow deopts to exceed the limit by a certain
  // number of slack bytes.
  CHECK_GT(
      static_cast<uintptr_t>(caller_frame_top_) - total_output_frame_size,
      stack_guard->real_jslimit() - kStackLimitSlackForDeoptimizationInBytes);
}

namespace {

// Get the dispatch builtin for unoptimized frames.
Builtin DispatchBuiltinFor(bool is_baseline, bool advance_bc) {
  if (is_baseline) {
    return advance_bc ? Builtin::kBaselineOrInterpreterEnterAtNextBytecode
                      : Builtin::kBaselineOrInterpreterEnterAtBytecode;
  } else {
    return advance_bc ? Builtin::kInterpreterEnterAtNextBytecode
                      : Builtin::kInterpreterEnterAtBytecode;
  }
}

}  // namespace

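// Builds the FrameDescription for a single interpreted (or baseline) frame:
// stack parameters, the fixed frame slots, the interpreter registers, and
// the accumulator, then sets the pc to the dispatch builtin chosen above.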
void Deoptimizer::DoComputeUnoptimizedFrame(TranslatedFrame* translated_frame,
                                            int frame_index,
                                            bool goto_catch_handler) {
  SharedFunctionInfo shared = translated_frame->raw_shared_info();
  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  const bool is_bottommost = (0 == frame_index);
  const bool is_topmost = (output_count_ - 1 == frame_index);

  const int real_bytecode_offset = translated_frame->bytecode_offset().ToInt();
  const int bytecode_offset =
      goto_catch_handler ? catch_handler_pc_offset_ : real_bytecode_offset;

  const int parameters_count =
      shared.internal_formal_parameter_count_with_receiver();

  // If this is the bottommost frame or the previous frame was the arguments
  // adaptor fake frame, then we already have extra arguments in the stack
  // (including any extra padding). Therefore we should not try to add any
  // padding.
  bool should_pad_arguments =
      !is_bottommost && (translated_state_.frames()[frame_index - 1]).kind() !=
                            TranslatedFrame::kArgumentsAdaptor;

  const int locals_count = translated_frame->height();
  UnoptimizedFrameInfo frame_info = UnoptimizedFrameInfo::Precise(
      parameters_count, locals_count, is_topmost, should_pad_arguments);
  const uint32_t output_frame_size = frame_info.frame_size_in_bytes();

  TranslatedFrame::iterator function_iterator = value_iterator++;

  BytecodeArray bytecode_array =
      shared.HasBreakInfo() ? shared.GetDebugInfo().DebugBytecodeArray()
                            : shared.GetBytecodeArray(isolate());

  // Allocate and store the output frame description.
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, parameters_count);
  FrameWriter frame_writer(this, output_frame, verbose_trace_scope());

  CHECK(frame_index >= 0 && frame_index < output_count_);
  CHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // Compute this frame's PC and state.
  // For interpreted frames, the PC will be a special builtin that
  // continues the bytecode dispatch. Note that non-topmost and lazy-style
  // bailout handlers also advance the bytecode offset before dispatch, hence
  // simulating what normal handlers do upon completion of the operation.
  // For baseline frames, the PC will be a builtin to convert the interpreter
  // frame to a baseline frame before continuing execution of baseline code.
  // We can't directly continue into baseline code, because of CFI.
  Builtins* builtins = isolate_->builtins();
  const bool advance_bc =
      (!is_topmost || (deopt_kind_ == DeoptimizeKind::kLazy)) &&
      !goto_catch_handler;
  const bool is_baseline = shared.HasBaselineCode();
  Code dispatch_builtin =
      FromCodeT(builtins->code(DispatchBuiltinFor(is_baseline, advance_bc)));

  if (verbose_tracing_enabled()) {
    PrintF(trace_scope()->file(), "  translating %s frame ",
           is_baseline ? "baseline" : "interpreted");
    std::unique_ptr<char[]> name = shared.DebugNameCStr();
    PrintF(trace_scope()->file(), "%s", name.get());
    PrintF(trace_scope()->file(), " => bytecode_offset=%d, ",
           real_bytecode_offset);
    PrintF(trace_scope()->file(), "variable_frame_size=%d, frame_size=%d%s\n",
           frame_info.frame_size_in_bytes_without_fixed(), output_frame_size,
           goto_catch_handler ? " (throw)" : "");
  }

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  const intptr_t top_address =
      is_bottommost ? caller_frame_top_ - output_frame_size
                    : output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  ReadOnlyRoots roots(isolate());
  if (should_pad_arguments) {
    for (int i = 0; i < ArgumentPaddingSlots(parameters_count); ++i) {
      frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
    }
  }

  if (verbose_tracing_enabled() && is_bottommost &&
      actual_argument_count_ > parameters_count) {
    PrintF(trace_scope_->file(),
           "    -- %d extra argument(s) already in the stack --\n",
           actual_argument_count_ - parameters_count);
  }
  frame_writer.PushStackJSArguments(value_iterator, parameters_count);

  DCHECK_EQ(output_frame->GetLastArgumentSlotOffset(should_pad_arguments),
            frame_writer.top_offset());
  if (verbose_tracing_enabled()) {
    PrintF(trace_scope()->file(), "    -------------------------\n");
  }

  // There are no translation commands for the caller's pc and fp, the
  // context, the function and the bytecode offset.  Synthesize their values
  // and set them up explicitly.
  //
  // The caller's pc for the bottommost output frame is the same as in the
  // input frame. For all subsequent output frames, it can be read from the
  // previous one. This frame's pc can be computed from the non-optimized
  // function code and bytecode offset of the bailout.
  if (is_bottommost) {
    frame_writer.PushBottommostCallerPc(caller_pc_);
  } else {
    frame_writer.PushApprovedCallerPc(output_[frame_index - 1]->GetPc());
  }

  // The caller's frame pointer for the bottommost output frame is the same
  // as in the input frame.  For all subsequent output frames, it can be
  // read from the previous one.  Also compute and set this frame's frame
  // pointer.
  const intptr_t caller_fp =
      is_bottommost ? caller_fp_ : output_[frame_index - 1]->GetFp();
  frame_writer.PushCallerFp(caller_fp);

  const intptr_t fp_value = top_address + frame_writer.top_offset();
  output_frame->SetFp(fp_value);
  if (is_topmost) {
    Register fp_reg = UnoptimizedFrame::fp_register();
    output_frame->SetRegister(fp_reg.code(), fp_value);
  }

  if (FLAG_enable_embedded_constant_pool) {
    // For the bottommost output frame the constant pool pointer can be gotten
    // from the input frame. For subsequent output frames, it can be read from
    // the previous frame.
    const intptr_t caller_cp =
        is_bottommost ? caller_constant_pool_
                      : output_[frame_index - 1]->GetConstantPool();
    frame_writer.PushCallerConstantPool(caller_cp);
  }

  // For the bottommost output frame the context can be gotten from the input
  // frame. For all subsequent output frames it can be gotten from the function
  // so long as we don't inline functions that need local contexts.

  // When deoptimizing into a catch block, we need to take the context
  // from a register that was specified in the handler table.
  TranslatedFrame::iterator context_pos = value_iterator++;
  if (goto_catch_handler) {
    // Skip to the translated value of the register specified
    // in the handler table.
    for (int i = 0; i < catch_handler_data_ + 1; ++i) {
      context_pos++;
    }
  }
  // Read the context from the translations.
  Object context = context_pos->GetRawValue();
  output_frame->SetContext(static_cast<intptr_t>(context.ptr()));
  frame_writer.PushTranslatedValue(context_pos, "context");

  // The function was mentioned explicitly in the BEGIN_FRAME.
  frame_writer.PushTranslatedValue(function_iterator, "function");

  // Actual argument count.
  int argc;
  if (is_bottommost) {
    argc = actual_argument_count_;
  } else {
    TranslatedFrame::Kind previous_frame_kind =
        (translated_state_.frames()[frame_index - 1]).kind();
    argc = previous_frame_kind == TranslatedFrame::kArgumentsAdaptor
               ? output_[frame_index - 1]->parameter_count()
               : parameters_count;
  }
  frame_writer.PushRawValue(argc, "actual argument count\n");

  // Set the bytecode array pointer.
  frame_writer.PushRawObject(bytecode_array, "bytecode array\n");

  // The bytecode offset was mentioned explicitly in the BEGIN_FRAME.
  const int raw_bytecode_offset =
      BytecodeArray::kHeaderSize - kHeapObjectTag + bytecode_offset;
  Smi smi_bytecode_offset = Smi::FromInt(raw_bytecode_offset);
  frame_writer.PushRawObject(smi_bytecode_offset, "bytecode offset\n");

  if (verbose_tracing_enabled()) {
    PrintF(trace_scope()->file(), "    -------------------------\n");
  }

  // Translate the rest of the interpreter registers in the frame.
  // The return_value_offset is counted from the top. Here, we compute the
  // register index (counted from the start).
  const int return_value_first_reg =
      locals_count - translated_frame->return_value_offset();
  const int return_value_count = translated_frame->return_value_count();
  for (int i = 0; i < locals_count; ++i, ++value_iterator) {
    // Ensure we write the return value if we have one and we are returning
    // normally to a lazy deopt point.
    if (is_topmost && !goto_catch_handler &&
        deopt_kind_ == DeoptimizeKind::kLazy && i >= return_value_first_reg &&
        i < return_value_first_reg + return_value_count) {
      const int return_index = i - return_value_first_reg;
      if (return_index == 0) {
        frame_writer.PushRawValue(input_->GetRegister(kReturnRegister0.code()),
                                  "return value 0\n");
        // We do not handle the situation where one return value should go into
        // the accumulator and another one into an ordinary register. Since
        // the interpreter should never create such a situation, just assert
        // this does not happen.
        CHECK_LE(return_value_first_reg + return_value_count, locals_count);
      } else {
        CHECK_EQ(return_index, 1);
        frame_writer.PushRawValue(input_->GetRegister(kReturnRegister1.code()),
                                  "return value 1\n");
      }
    } else {
      // This is not a return value; just write the value from the
      // translations.
      frame_writer.PushTranslatedValue(value_iterator, "stack parameter");
    }
  }

  uint32_t register_slots_written = static_cast<uint32_t>(locals_count);
  DCHECK_LE(register_slots_written, frame_info.register_stack_slot_count());
  // Some architectures must pad the stack frame with extra stack slots
  // to ensure the stack frame is aligned. Do this now.
  while (register_slots_written < frame_info.register_stack_slot_count()) {
    register_slots_written++;
    frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
  }

  // Translate the accumulator register (depending on frame position).
  if (is_topmost) {
    for (int i = 0; i < ArgumentPaddingSlots(1); ++i) {
      frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
    }
    // For the topmost frame, put the accumulator on the stack. The
    // {NotifyDeoptimized} builtin pops it off the topmost frame (possibly
    // after materialization).
    if (goto_catch_handler) {
      // If we are lazy deopting to a catch handler, we set the accumulator to
      // the exception (which lives in the result register).
      intptr_t accumulator_value =
          input_->GetRegister(kInterpreterAccumulatorRegister.code());
      frame_writer.PushRawObject(Object(accumulator_value), "accumulator\n");
    } else {
      // If we are lazily deoptimizing, make sure we store the deopt
      // return value into the appropriate slot.
      if (deopt_kind_ == DeoptimizeKind::kLazy &&
          translated_frame->return_value_offset() == 0 &&
          translated_frame->return_value_count() > 0) {
        CHECK_EQ(translated_frame->return_value_count(), 1);
        frame_writer.PushRawValue(input_->GetRegister(kReturnRegister0.code()),
                                  "return value 0\n");
      } else {
        frame_writer.PushTranslatedValue(value_iterator, "accumulator");
      }
    }
    ++value_iterator;  // Move over the accumulator.
  } else {
    // For non-topmost frames, skip the accumulator translation. For those
    // frames, the return value from the callee will become the accumulator.
    ++value_iterator;
  }
  CHECK_EQ(translated_frame->end(), value_iterator);
  CHECK_EQ(0u, frame_writer.top_offset());

  const intptr_t pc =
      static_cast<intptr_t>(dispatch_builtin.InstructionStart());
  if (is_topmost) {
    // Only the pc of the topmost frame needs to be signed since it is
    // authenticated at the end of the DeoptimizationEntry builtin.
    const intptr_t top_most_pc = PointerAuthentication::SignAndCheckPC(
        pc, frame_writer.frame()->GetTop());
    output_frame->SetPc(top_most_pc);
  } else {
    output_frame->SetPc(pc);
  }

  // Update constant pool.
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        static_cast<intptr_t>(dispatch_builtin.constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
    if (is_topmost) {
      Register constant_pool_reg =
          UnoptimizedFrame::constant_pool_pointer_register();
      output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
    }
  }

  // Clear the context register. The context might be a de-materialized object
  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
  // safety we use Smi(0) instead of the potential {arguments_marker} here.
  if (is_topmost) {
    intptr_t context_value = static_cast<intptr_t>(Smi::zero().ptr());
    Register context_reg = JavaScriptFrame::context_register();
    output_frame->SetRegister(context_reg.code(), context_value);
    // Set the continuation for the topmost frame.
    CodeT continuation = builtins->code(Builtin::kNotifyDeoptimized);
    output_frame->SetContinuation(
        static_cast<intptr_t>(continuation.InstructionStart()));
  }
}

DoComputeArgumentsAdaptorFrame(TranslatedFrame * translated_frame,int frame_index)1248 void Deoptimizer::DoComputeArgumentsAdaptorFrame(
1249     TranslatedFrame* translated_frame, int frame_index) {
1250   // Arguments adaptor can not be top most, nor the bottom most frames.
1251   CHECK(frame_index < output_count_ - 1);
1252   CHECK_GT(frame_index, 0);
1253   CHECK_NULL(output_[frame_index]);
1254 
1255   // During execution, V8 does not understand arguments adaptor frames anymore,
1256   // so during deoptimization we only push the extra arguments (arguments with
1257   // index greater than the formal parameter count). Therefore we call this
1258   // TranslatedFrame the fake adaptor frame.
1259   // For more info, see the design document:
1260   // https://docs.google.com/document/d/150wGaUREaZI6YWqOQFD5l2mWQXaPbbZjcAIJLOFrzMs
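  // Example, assuming a hypothetical function:
  //   function f(a, b) {}   // formal_parameter_count == 2
  //   f(1, 2, 3, 4);        // argument_count_without_receiver == 4
  // Here extra_argument_count == 2, so only the extra arguments 3 and 4 are
  // pushed by this fake adaptor frame; the receiver and the arguments 1 and 2
  // are pushed as part of the interpreted frame's own translation.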
1261 
1262   TranslatedFrame::iterator value_iterator = translated_frame->begin();
1263   const int argument_count_without_receiver = translated_frame->height() - 1;
1264   const int formal_parameter_count =
1265       translated_frame->raw_shared_info()
1266           .internal_formal_parameter_count_without_receiver();
1267   const int extra_argument_count =
1268       argument_count_without_receiver - formal_parameter_count;
1269   // The number of pushed arguments is the maximum of the actual argument count
1270   // and the formal parameter count + the receiver.
1271   const int padding = ArgumentPaddingSlots(
1272       std::max(argument_count_without_receiver, formal_parameter_count) + 1);
1273   const int output_frame_size =
1274       (std::max(0, extra_argument_count) + padding) * kSystemPointerSize;
1275   if (verbose_tracing_enabled()) {
1276     PrintF(trace_scope_->file(),
1277            "  translating arguments adaptor => variable_size=%d\n",
1278            output_frame_size);
1279   }
1280 
1281   // Allocate and store the output frame description.
1282   FrameDescription* output_frame = new (output_frame_size) FrameDescription(
1283       output_frame_size, JSParameterCount(argument_count_without_receiver));
1284   // The top address of the frame is computed from the previous frame's top and
1285   // this frame's size.
1286   const intptr_t top_address =
1287       output_[frame_index - 1]->GetTop() - output_frame_size;
1288   output_frame->SetTop(top_address);
1289   // This is not a real frame; we take PC and FP values from the parent frame.
1290   output_frame->SetPc(output_[frame_index - 1]->GetPc());
1291   output_frame->SetFp(output_[frame_index - 1]->GetFp());
1292   output_[frame_index] = output_frame;
1293 
1294   FrameWriter frame_writer(this, output_frame, verbose_trace_scope());
1295 
1296   ReadOnlyRoots roots(isolate());
1297   for (int i = 0; i < padding; ++i) {
1298     frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
1299   }
1300 
1301   if (extra_argument_count > 0) {
1302     // The receiver and arguments with index below the formal parameter
1303     // count are in the fake adaptor frame, because they are used to create the
1304     // arguments object. We should however not push them, since the interpreter
1305     // frame will do that.
1306     value_iterator++;  // Skip function.
1307     value_iterator++;  // Skip receiver.
1308     for (int i = 0; i < formal_parameter_count; i++) value_iterator++;
1309     frame_writer.PushStackJSArguments(value_iterator, extra_argument_count);
1310   }
1311 }
1312 
1313 void Deoptimizer::DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
1314                                               int frame_index) {
1315   TranslatedFrame::iterator value_iterator = translated_frame->begin();
1316   const bool is_topmost = (output_count_ - 1 == frame_index);
1317   // The construct frame could become topmost only if we inlined a constructor
1318   // call which does a tail call (otherwise the tail callee's frame would be
1319   // the topmost one). So it could only be the DeoptimizeKind::kLazy case.
1320   CHECK(!is_topmost || deopt_kind_ == DeoptimizeKind::kLazy);
1321 
1322   Builtins* builtins = isolate_->builtins();
1323   Code construct_stub =
1324       FromCodeT(builtins->code(Builtin::kJSConstructStubGeneric));
1325   BytecodeOffset bytecode_offset = translated_frame->bytecode_offset();
1326 
1327   const int parameters_count = translated_frame->height();
1328   ConstructStubFrameInfo frame_info =
1329       ConstructStubFrameInfo::Precise(parameters_count, is_topmost);
1330   const uint32_t output_frame_size = frame_info.frame_size_in_bytes();
1331 
1332   TranslatedFrame::iterator function_iterator = value_iterator++;
1333   if (verbose_tracing_enabled()) {
1334     PrintF(trace_scope()->file(),
1335            "  translating construct stub => bytecode_offset=%d (%s), "
1336            "variable_frame_size=%d, frame_size=%d\n",
1337            bytecode_offset.ToInt(),
1338            bytecode_offset == BytecodeOffset::ConstructStubCreate() ? "create"
1339                                                                     : "invoke",
1340            frame_info.frame_size_in_bytes_without_fixed(), output_frame_size);
1341   }
1342 
1343   // Allocate and store the output frame description.
1344   FrameDescription* output_frame = new (output_frame_size)
1345       FrameDescription(output_frame_size, parameters_count);
1346   FrameWriter frame_writer(this, output_frame, verbose_trace_scope());
1347 
1348   // The construct frame can not be the bottommost one.
1349   DCHECK(frame_index > 0 && frame_index < output_count_);
1350   DCHECK_NULL(output_[frame_index]);
1351   output_[frame_index] = output_frame;
1352 
1353   // The top address of the frame is computed from the previous frame's top and
1354   // this frame's size.
1355   const intptr_t top_address =
1356       output_[frame_index - 1]->GetTop() - output_frame_size;
1357   output_frame->SetTop(top_address);
1358 
1359   ReadOnlyRoots roots(isolate());
1360   for (int i = 0; i < ArgumentPaddingSlots(parameters_count); ++i) {
1361     frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
1362   }
1363 
1364   // The allocated receiver of a construct stub frame is passed as the
1365   // receiver parameter through the translation. It might encode
1366   // a captured object, so we need to save it for later.
1367   TranslatedFrame::iterator receiver_iterator = value_iterator;
1368 
1369   // Compute the incoming parameter translation.
1370   frame_writer.PushStackJSArguments(value_iterator, parameters_count);
1371 
1372   DCHECK_EQ(output_frame->GetLastArgumentSlotOffset(),
1373             frame_writer.top_offset());
1374 
1375   // Read caller's PC from the previous frame.
1376   const intptr_t caller_pc = output_[frame_index - 1]->GetPc();
1377   frame_writer.PushApprovedCallerPc(caller_pc);
1378 
1379   // Read caller's FP from the previous frame, and set this frame's FP.
1380   const intptr_t caller_fp = output_[frame_index - 1]->GetFp();
1381   frame_writer.PushCallerFp(caller_fp);
1382 
1383   const intptr_t fp_value = top_address + frame_writer.top_offset();
1384   output_frame->SetFp(fp_value);
1385   if (is_topmost) {
1386     Register fp_reg = JavaScriptFrame::fp_register();
1387     output_frame->SetRegister(fp_reg.code(), fp_value);
1388   }
1389 
1390   if (FLAG_enable_embedded_constant_pool) {
1391     // Read the caller's constant pool from the previous frame.
1392     const intptr_t caller_cp = output_[frame_index - 1]->GetConstantPool();
1393     frame_writer.PushCallerConstantPool(caller_cp);
1394   }
1395 
1396   // A marker value is used in place of the context to mark the frame type.
1397   intptr_t marker = StackFrame::TypeToMarker(StackFrame::CONSTRUCT);
1398   frame_writer.PushRawValue(marker, "context (construct stub sentinel)\n");
1399 
1400   frame_writer.PushTranslatedValue(value_iterator++, "context");
1401 
1402   // Number of incoming arguments.
1403   const uint32_t argc = parameters_count;
1404   frame_writer.PushRawObject(Smi::FromInt(argc), "argc\n");
1405 
1406   // The constructor function was mentioned explicitly in the
1407   // CONSTRUCT_STUB_FRAME.
1408   frame_writer.PushTranslatedValue(function_iterator, "constructor function\n");
1409 
1410   // The deopt info contains the implicit receiver or the new target at the
1411   // position of the receiver. Copy it to the top of the stack, with the hole
1412   // value as padding to maintain alignment.
1413 
1414   frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
1415 
1416   CHECK(bytecode_offset == BytecodeOffset::ConstructStubCreate() ||
1417         bytecode_offset == BytecodeOffset::ConstructStubInvoke());
1418   const char* debug_hint =
1419       bytecode_offset == BytecodeOffset::ConstructStubCreate()
1420           ? "new target\n"
1421           : "allocated receiver\n";
1422   frame_writer.PushTranslatedValue(receiver_iterator, debug_hint);
1423 
1424   if (is_topmost) {
1425     for (int i = 0; i < ArgumentPaddingSlots(1); ++i) {
1426       frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
1427     }
1428     // Ensure the result is restored when we return to the stub.
1429     Register result_reg = kReturnRegister0;
1430     intptr_t result = input_->GetRegister(result_reg.code());
1431     frame_writer.PushRawValue(result, "subcall result\n");
1432   }
1433 
1434   CHECK_EQ(translated_frame->end(), value_iterator);
1435   CHECK_EQ(0u, frame_writer.top_offset());
1436 
1437   // Compute this frame's PC.
1438   DCHECK(bytecode_offset.IsValidForConstructStub());
1439   Address start = construct_stub.InstructionStart();
1440   const int pc_offset =
1441       bytecode_offset == BytecodeOffset::ConstructStubCreate()
1442           ? isolate_->heap()->construct_stub_create_deopt_pc_offset().value()
1443           : isolate_->heap()->construct_stub_invoke_deopt_pc_offset().value();
1444   intptr_t pc_value = static_cast<intptr_t>(start + pc_offset);
1445   if (is_topmost) {
1446     // Only the pc of the topmost frame needs to be signed since it is
1447     // authenticated at the end of the DeoptimizationEntry builtin.
1448     output_frame->SetPc(PointerAuthentication::SignAndCheckPC(
1449         pc_value, frame_writer.frame()->GetTop()));
1450   } else {
1451     output_frame->SetPc(pc_value);
1452   }
1453 
1454   // Update constant pool.
1455   if (FLAG_enable_embedded_constant_pool) {
1456     intptr_t constant_pool_value =
1457         static_cast<intptr_t>(construct_stub.constant_pool());
1458     output_frame->SetConstantPool(constant_pool_value);
1459     if (is_topmost) {
1460       Register constant_pool_reg =
1461           JavaScriptFrame::constant_pool_pointer_register();
1462       output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
1463     }
1464   }
1465 
1466   // Clear the context register. The context might be a de-materialized object
1467   // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
1468   // safety we use Smi(0) instead of the potential {arguments_marker} here.
1469   if (is_topmost) {
1470     intptr_t context_value = static_cast<intptr_t>(Smi::zero().ptr());
1471     Register context_reg = JavaScriptFrame::context_register();
1472     output_frame->SetRegister(context_reg.code(), context_value);
1473   }
1474 
1475   // Set the continuation for the topmost frame.
1476   if (is_topmost) {
1477     DCHECK_EQ(DeoptimizeKind::kLazy, deopt_kind_);
1478     CodeT continuation = builtins->code(Builtin::kNotifyDeoptimized);
1479     output_frame->SetContinuation(
1480         static_cast<intptr_t>(continuation.InstructionStart()));
1481   }
1482 }
1483 
1484 namespace {
1485 
1486 bool BuiltinContinuationModeIsJavaScript(BuiltinContinuationMode mode) {
1487   switch (mode) {
1488     case BuiltinContinuationMode::STUB:
1489       return false;
1490     case BuiltinContinuationMode::JAVASCRIPT:
1491     case BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH:
1492     case BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION:
1493       return true;
1494   }
1495   UNREACHABLE();
1496 }
1497 
1498 StackFrame::Type BuiltinContinuationModeToFrameType(
1499     BuiltinContinuationMode mode) {
1500   switch (mode) {
1501     case BuiltinContinuationMode::STUB:
1502       return StackFrame::BUILTIN_CONTINUATION;
1503     case BuiltinContinuationMode::JAVASCRIPT:
1504       return StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION;
1505     case BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH:
1506       return StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH;
1507     case BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION:
1508       return StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH;
1509   }
1510   UNREACHABLE();
1511 }
1512 
1513 }  // namespace
1514 
1515 Builtin Deoptimizer::TrampolineForBuiltinContinuation(
1516     BuiltinContinuationMode mode, bool must_handle_result) {
1517   switch (mode) {
1518     case BuiltinContinuationMode::STUB:
1519       return must_handle_result ? Builtin::kContinueToCodeStubBuiltinWithResult
1520                                 : Builtin::kContinueToCodeStubBuiltin;
1521     case BuiltinContinuationMode::JAVASCRIPT:
1522     case BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH:
1523     case BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION:
1524       return must_handle_result
1525                  ? Builtin::kContinueToJavaScriptBuiltinWithResult
1526                  : Builtin::kContinueToJavaScriptBuiltin;
1527   }
1528   UNREACHABLE();
1529 }
1530 
1531 #if V8_ENABLE_WEBASSEMBLY
1532 TranslatedValue Deoptimizer::TranslatedValueForWasmReturnKind(
1533     base::Optional<wasm::ValueKind> wasm_call_return_kind) {
1534   if (wasm_call_return_kind) {
1535     switch (wasm_call_return_kind.value()) {
1536       case wasm::kI32:
1537         return TranslatedValue::NewInt32(
1538             &translated_state_,
1539             (int32_t)input_->GetRegister(kReturnRegister0.code()));
1540       case wasm::kI64:
1541         return TranslatedValue::NewInt64ToBigInt(
1542             &translated_state_,
1543             (int64_t)input_->GetRegister(kReturnRegister0.code()));
1544       case wasm::kF32:
1545         return TranslatedValue::NewFloat(
1546             &translated_state_,
1547             Float32(*reinterpret_cast<float*>(
1548                 input_->GetDoubleRegister(wasm::kFpReturnRegisters[0].code())
1549                     .get_bits_address())));
1550       case wasm::kF64:
1551         return TranslatedValue::NewDouble(
1552             &translated_state_,
1553             input_->GetDoubleRegister(wasm::kFpReturnRegisters[0].code()));
1554       default:
1555         UNREACHABLE();
1556     }
1557   }
1558   return TranslatedValue::NewTagged(&translated_state_,
1559                                     ReadOnlyRoots(isolate()).undefined_value());
1560 }
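// Example (roughly): if the callee returned an f64, the result is read out of
// the first FP return register above and wrapped as a double TranslatedValue,
// which is later boxed as a HeapNumber when the value is handed back to JS;
// an empty {wasm_call_return_kind} (a void result) becomes undefined.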
1561 #endif  // V8_ENABLE_WEBASSEMBLY
1562 
1563 // BuiltinContinuationFrames capture the machine state that is expected as input
1564 // to a builtin, including both input register values and stack parameters. When
1565 // the frame is reactivated (i.e. the frame below it returns), a
1566 // ContinueToBuiltin stub restores the register state from the frame and tail
1567 // calls to the actual target builtin, making it appear that the stub had been
1568 // directly called by the frame above it. The input values to populate the frame
1569 // are taken from the deopt's FrameState.
1570 //
1571 // Frame translation happens in two modes, EAGER and LAZY. In EAGER mode, all of
1572 // the parameters to the Builtin are explicitly specified in the TurboFan
1573 // FrameState node. In LAZY mode, there is always one fewer parameter specified
1574 // in the FrameState than expected by the Builtin. In that case, construction of
1575 // BuiltinContinuationFrame adds the final missing parameter during
1576 // deoptimization, and that parameter is always on the stack and contains the
1577 // value returned from the callee of the call site triggering the LAZY deopt
1578 // (e.g. rax on x64). This requires that continuation Builtins for LAZY deopts
1579 // have at least one stack parameter.
1580 //
1581 //                TO
1582 //    |          ....           |
1583 //    +-------------------------+
1584 //    | arg padding (arch dept) |<- at most 1*kSystemPointerSize
1585 //    +-------------------------+
1586 //    |     builtin param 0     |<- FrameState input value n becomes
1587 //    +-------------------------+
1588 //    |           ...           |
1589 //    +-------------------------+
1590 //    |     builtin param m     |<- FrameState input value n+m-1, or in
1591 //    +-----needs-alignment-----+   the LAZY case, return LAZY result value
1592 //    | ContinueToBuiltin entry |
1593 //    +-------------------------+
1594 // |  |    saved frame (FP)     |
1595 // |  +=====needs=alignment=====+<- fpreg
1596 // |  |constant pool (if ool_cp)|
1597 // v  +-------------------------+
1598 //    |BUILTIN_CONTINUATION mark|
1599 //    +-------------------------+
1600 //    |  JSFunction (or zero)   |<- only if JavaScript builtin
1601 //    +-------------------------+
1602 //    |  frame height above FP  |
1603 //    +-------------------------+
1604 //    |         context         |<- this non-standard context slot contains
1605 //    +-------------------------+   the context, even for non-JS builtins.
1606 //    |      builtin index      |
1607 //    +-------------------------+
1608 //    | builtin input GPR reg0  |<- populated from deopt FrameState using
1609 //    +-------------------------+   the builtin's CallInterfaceDescriptor
1610 //    |          ...            |   to map a FrameState's 0..n-1 inputs to
1611 //    +-------------------------+   the builtin's n input register params.
1612 //    | builtin input GPR regn  |
1613 //    +-------------------------+
1614 //    | reg padding (arch dept) |
1615 //    +-----needs--alignment----+
1616 //    | res padding (arch dept) |<- only if {is_topmost}; result is pop'd by
1617 //    +-------------------------+<- kNotifyDeopt ASM stub and moved to acc
1618 //    |      result  value      |<- reg, as ContinueToBuiltin stub expects.
1619 //    +-----needs-alignment-----+<- spreg
1620 //
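// Example (a sketch of the LAZY case): consider a lazy deopt taken while a
// JavaScript builtin such as Array.prototype.map is on the stack, waiting for
// the result of a callback call. The FrameState describes every parameter of
// the continuation except the last stack slot; that slot is pushed as a hole
// placeholder below and is filled in with the callback's return value (taken
// from kReturnRegister0) before the continuation builtin resumes.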
1621 void Deoptimizer::DoComputeBuiltinContinuation(
1622     TranslatedFrame* translated_frame, int frame_index,
1623     BuiltinContinuationMode mode) {
1624   TranslatedFrame::iterator result_iterator = translated_frame->end();
1625 
1626   bool is_js_to_wasm_builtin_continuation = false;
1627 #if V8_ENABLE_WEBASSEMBLY
1628   is_js_to_wasm_builtin_continuation =
1629       translated_frame->kind() == TranslatedFrame::kJSToWasmBuiltinContinuation;
1630   if (is_js_to_wasm_builtin_continuation) {
1631     // For JSToWasmBuiltinContinuations, add a TranslatedValue with the result
1632     // of the Wasm call, extracted from the input FrameDescription.
1633     // This TranslatedValue will be written in the output frame in place of the
1634     // hole and we'll use ContinueToCodeStubBuiltin in place of
1635     // ContinueToCodeStubBuiltinWithResult.
1636     TranslatedValue result = TranslatedValueForWasmReturnKind(
1637         translated_frame->wasm_call_return_kind());
1638     translated_frame->Add(result);
1639   }
1640 #endif  // V8_ENABLE_WEBASSEMBLY
1641 
1642   TranslatedFrame::iterator value_iterator = translated_frame->begin();
1643 
1644   const BytecodeOffset bytecode_offset = translated_frame->bytecode_offset();
1645   Builtin builtin = Builtins::GetBuiltinFromBytecodeOffset(bytecode_offset);
1646   CallInterfaceDescriptor continuation_descriptor =
1647       Builtins::CallInterfaceDescriptorFor(builtin);
1648 
1649   const RegisterConfiguration* config = RegisterConfiguration::Default();
1650 
1651   const bool is_bottommost = (0 == frame_index);
1652   const bool is_topmost = (output_count_ - 1 == frame_index);
1653 
1654   const int parameters_count = translated_frame->height();
1655   BuiltinContinuationFrameInfo frame_info =
1656       BuiltinContinuationFrameInfo::Precise(parameters_count,
1657                                             continuation_descriptor, config,
1658                                             is_topmost, deopt_kind_, mode);
1659 
1660   const unsigned output_frame_size = frame_info.frame_size_in_bytes();
1661   const unsigned output_frame_size_above_fp =
1662       frame_info.frame_size_in_bytes_above_fp();
1663 
1664   // Validate types of parameters. They must all be tagged except for argc for
1665   // JS builtins.
1666   bool has_argc = false;
1667   const int register_parameter_count =
1668       continuation_descriptor.GetRegisterParameterCount();
1669   for (int i = 0; i < register_parameter_count; ++i) {
1670     MachineType type = continuation_descriptor.GetParameterType(i);
1671     int code = continuation_descriptor.GetRegisterParameter(i).code();
1672     // Only tagged and int32 arguments are supported, and int32 only for the
1673     // arguments count on JavaScript builtins.
1674     if (type == MachineType::Int32()) {
1675       CHECK_EQ(code, kJavaScriptCallArgCountRegister.code());
1676       has_argc = true;
1677     } else {
1678       // Any other argument must be a tagged value.
1679       CHECK(IsAnyTagged(type.representation()));
1680     }
1681   }
1682   CHECK_EQ(BuiltinContinuationModeIsJavaScript(mode), has_argc);
1683 
1684   if (verbose_tracing_enabled()) {
1685     PrintF(trace_scope()->file(),
1686            "  translating BuiltinContinuation to %s,"
1687            " => register_param_count=%d,"
1688            " stack_param_count=%d, frame_size=%d\n",
1689            Builtins::name(builtin), register_parameter_count,
1690            frame_info.stack_parameter_count(), output_frame_size);
1691   }
1692 
1693   FrameDescription* output_frame = new (output_frame_size)
1694       FrameDescription(output_frame_size, frame_info.stack_parameter_count());
1695   output_[frame_index] = output_frame;
1696   FrameWriter frame_writer(this, output_frame, verbose_trace_scope());
1697 
1698   // The top address of the frame is computed from the previous frame's top and
1699   // this frame's size.
1700   const intptr_t top_address =
1701       is_bottommost ? caller_frame_top_ - output_frame_size
1702                     : output_[frame_index - 1]->GetTop() - output_frame_size;
1703   output_frame->SetTop(top_address);
1704 
1705   // Get the possible JSFunction for the case where this is a
1706   // JavaScriptBuiltinContinuationFrame, which needs the JSFunction pointer
1707   // like a normal JavaScriptFrame.
1708   const intptr_t maybe_function = value_iterator->GetRawValue().ptr();
1709   ++value_iterator;
1710 
1711   ReadOnlyRoots roots(isolate());
1712   const int padding = ArgumentPaddingSlots(frame_info.stack_parameter_count());
1713   for (int i = 0; i < padding; ++i) {
1714     frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
1715   }
1716 
1717   if (mode == BuiltinContinuationMode::STUB) {
1718     DCHECK_EQ(
1719         Builtins::CallInterfaceDescriptorFor(builtin).GetStackArgumentOrder(),
1720         StackArgumentOrder::kDefault);
1721     for (uint32_t i = 0; i < frame_info.translated_stack_parameter_count();
1722          ++i, ++value_iterator) {
1723       frame_writer.PushTranslatedValue(value_iterator, "stack parameter");
1724     }
1725     if (frame_info.frame_has_result_stack_slot()) {
1726       if (is_js_to_wasm_builtin_continuation) {
1727         frame_writer.PushTranslatedValue(result_iterator,
1728                                          "return result on lazy deopt\n");
1729       } else {
1730         DCHECK_EQ(result_iterator, translated_frame->end());
1731         frame_writer.PushRawObject(
1732             roots.the_hole_value(),
1733             "placeholder for return result on lazy deopt\n");
1734       }
1735     }
1736   } else {
1737     // JavaScript builtin.
1738     if (frame_info.frame_has_result_stack_slot()) {
1739       frame_writer.PushRawObject(
1740           roots.the_hole_value(),
1741           "placeholder for return result on lazy deopt\n");
1742     }
1743     switch (mode) {
1744       case BuiltinContinuationMode::STUB:
1745         UNREACHABLE();
1746       case BuiltinContinuationMode::JAVASCRIPT:
1747         break;
1748       case BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH: {
1749         frame_writer.PushRawObject(roots.the_hole_value(),
1750                                    "placeholder for exception on lazy deopt\n");
1751       } break;
1752       case BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION: {
1753         intptr_t accumulator_value =
1754             input_->GetRegister(kInterpreterAccumulatorRegister.code());
1755         frame_writer.PushRawObject(Object(accumulator_value),
1756                                    "exception (from accumulator)\n");
1757       } break;
1758     }
1759     frame_writer.PushStackJSArguments(
1760         value_iterator, frame_info.translated_stack_parameter_count());
1761   }
1762 
1763   DCHECK_EQ(output_frame->GetLastArgumentSlotOffset(),
1764             frame_writer.top_offset());
1765 
1766   std::vector<TranslatedFrame::iterator> register_values;
1767   int total_registers = config->num_general_registers();
1768   register_values.resize(total_registers, {value_iterator});
1769 
1770   for (int i = 0; i < register_parameter_count; ++i, ++value_iterator) {
1771     int code = continuation_descriptor.GetRegisterParameter(i).code();
1772     register_values[code] = value_iterator;
1773   }
1774 
1775   // The context parameter is always implicit in the CallInterfaceDescriptor,
1776   // but its register must be explicitly set when continuing to the builtin.
1777   // Make sure that it's harvested from the translation and copied into the
1778   // register set (it was automatically added at the end of the FrameState by
1779   // the instruction selector).
1780   Object context = value_iterator->GetRawValue();
1781   const intptr_t value = context.ptr();
1782   TranslatedFrame::iterator context_register_value = value_iterator++;
1783   register_values[kContextRegister.code()] = context_register_value;
1784   output_frame->SetContext(value);
1785   output_frame->SetRegister(kContextRegister.code(), value);
1786 
1787   // Set caller's PC (JSFunction continuation).
1788   if (is_bottommost) {
1789     frame_writer.PushBottommostCallerPc(caller_pc_);
1790   } else {
1791     frame_writer.PushApprovedCallerPc(output_[frame_index - 1]->GetPc());
1792   }
1793 
1794   // Read caller's FP from the previous frame, and set this frame's FP.
1795   const intptr_t caller_fp =
1796       is_bottommost ? caller_fp_ : output_[frame_index - 1]->GetFp();
1797   frame_writer.PushCallerFp(caller_fp);
1798 
1799   const intptr_t fp_value = top_address + frame_writer.top_offset();
1800   output_frame->SetFp(fp_value);
1801 
1802   DCHECK_EQ(output_frame_size_above_fp, frame_writer.top_offset());
1803 
1804   if (FLAG_enable_embedded_constant_pool) {
1805     // Read the caller's constant pool from the previous frame.
1806     const intptr_t caller_cp =
1807         is_bottommost ? caller_constant_pool_
1808                       : output_[frame_index - 1]->GetConstantPool();
1809     frame_writer.PushCallerConstantPool(caller_cp);
1810   }
1811 
1812   // A marker value is used in place of the context.
1813   const intptr_t marker =
1814       StackFrame::TypeToMarker(BuiltinContinuationModeToFrameType(mode));
1815   frame_writer.PushRawValue(marker,
1816                             "context (builtin continuation sentinel)\n");
1817 
1818   if (BuiltinContinuationModeIsJavaScript(mode)) {
1819     frame_writer.PushRawValue(maybe_function, "JSFunction\n");
1820   } else {
1821     frame_writer.PushRawValue(0, "unused\n");
1822   }
1823 
1824   // The delta from the SP to the FP; used to reconstruct SP in
1825   // Isolate::UnwindAndFindHandler.
1826   frame_writer.PushRawObject(Smi::FromInt(output_frame_size_above_fp),
1827                              "frame height at deoptimization\n");
1828 
1829   // Push the context, even if this is a stub continuation frame. We can't use
1830   // the usual context slot, because we must store the frame marker there.
1831   frame_writer.PushTranslatedValue(context_register_value,
1832                                    "builtin JavaScript context\n");
1833 
1834   // The builtin to continue to.
1835   frame_writer.PushRawObject(Smi::FromInt(static_cast<int>(builtin)),
1836                              "builtin index\n");
1837 
1838   const int allocatable_register_count =
1839       config->num_allocatable_general_registers();
1840   for (int i = 0; i < allocatable_register_count; ++i) {
1841     int code = config->GetAllocatableGeneralCode(i);
1842     base::ScopedVector<char> str(128);
1843     if (verbose_tracing_enabled()) {
1844       if (BuiltinContinuationModeIsJavaScript(mode) &&
1845           code == kJavaScriptCallArgCountRegister.code()) {
1846         SNPrintF(
1847             str,
1848             "tagged argument count %s (will be untagged by continuation)\n",
1849             RegisterName(Register::from_code(code)));
1850       } else {
1851         SNPrintF(str, "builtin register argument %s\n",
1852                  RegisterName(Register::from_code(code)));
1853       }
1854     }
1855     frame_writer.PushTranslatedValue(
1856         register_values[code], verbose_tracing_enabled() ? str.begin() : "");
1857   }
1858 
1859   // Some architectures must pad the stack frame with extra stack slots
1860   // to ensure the stack frame is aligned.
1861   const int padding_slot_count =
1862       BuiltinContinuationFrameConstants::PaddingSlotCount(
1863           allocatable_register_count);
1864   for (int i = 0; i < padding_slot_count; ++i) {
1865     frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
1866   }
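  // Example: on arm64 the stack pointer must stay 16-byte aligned, so if the
  // number of allocatable general registers is odd, one extra padding slot is
  // pushed here to keep the register section (and thus the frame) aligned.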
1867 
1868   if (is_topmost) {
1869     for (int i = 0; i < ArgumentPaddingSlots(1); ++i) {
1870       frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
1871     }
1872 
1873     // Ensure the result is restored when we return to the stub.
1874     if (frame_info.frame_has_result_stack_slot()) {
1875       Register result_reg = kReturnRegister0;
1876       frame_writer.PushRawValue(input_->GetRegister(result_reg.code()),
1877                                 "callback result\n");
1878     } else {
1879       frame_writer.PushRawObject(roots.undefined_value(), "callback result\n");
1880     }
1881   }
1882 
1883   CHECK_EQ(result_iterator, value_iterator);
1884   CHECK_EQ(0u, frame_writer.top_offset());
1885 
1886   // Clear the context register. The context might be a de-materialized object
1887   // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
1888   // safety we use Smi(0) instead of the potential {arguments_marker} here.
1889   if (is_topmost) {
1890     intptr_t context_value = static_cast<intptr_t>(Smi::zero().ptr());
1891     Register context_reg = JavaScriptFrame::context_register();
1892     output_frame->SetRegister(context_reg.code(), context_value);
1893   }
1894 
1895   // Ensure the frame pointer register points to the callee's frame. The builtin
1896   // will build its own frame once we continue to it.
1897   Register fp_reg = JavaScriptFrame::fp_register();
1898   output_frame->SetRegister(fp_reg.code(), fp_value);
1899   // For JSToWasmBuiltinContinuations use ContinueToCodeStubBuiltin, and not
1900   // ContinueToCodeStubBuiltinWithResult because we don't want to overwrite the
1901   // return value that we have already set.
1902   CodeT continue_to_builtin =
1903       isolate()->builtins()->code(TrampolineForBuiltinContinuation(
1904           mode, frame_info.frame_has_result_stack_slot() &&
1905                     !is_js_to_wasm_builtin_continuation));
1906   if (is_topmost) {
1907     // Only the pc of the topmost frame needs to be signed since it is
1908     // authenticated at the end of the DeoptimizationEntry builtin.
1909     const intptr_t top_most_pc = PointerAuthentication::SignAndCheckPC(
1910         static_cast<intptr_t>(continue_to_builtin.InstructionStart()),
1911         frame_writer.frame()->GetTop());
1912     output_frame->SetPc(top_most_pc);
1913   } else {
1914     output_frame->SetPc(
1915         static_cast<intptr_t>(continue_to_builtin.InstructionStart()));
1916   }
1917 
1918   CodeT continuation = isolate()->builtins()->code(Builtin::kNotifyDeoptimized);
1919   output_frame->SetContinuation(
1920       static_cast<intptr_t>(continuation.InstructionStart()));
1921 }
1922 
1923 void Deoptimizer::MaterializeHeapObjects() {
1924   translated_state_.Prepare(static_cast<Address>(stack_fp_));
1925   if (FLAG_deopt_every_n_times > 0) {
1926     // Doing a GC here will surface any problems with the deoptimized frames.
1927     isolate_->heap()->CollectAllGarbage(Heap::kNoGCFlags,
1928                                         GarbageCollectionReason::kTesting);
1929   }
1930 
1931   for (auto& materialization : values_to_materialize_) {
1932     Handle<Object> value = materialization.value_->GetValue();
1933 
1934     if (verbose_tracing_enabled()) {
1935       PrintF(trace_scope()->file(),
1936              "Materialization [" V8PRIxPTR_FMT "] <- " V8PRIxPTR_FMT " ;  ",
1937              static_cast<intptr_t>(materialization.output_slot_address_),
1938              value->ptr());
1939       value->ShortPrint(trace_scope()->file());
1940       PrintF(trace_scope()->file(), "\n");
1941     }
1942 
1943     *(reinterpret_cast<Address*>(materialization.output_slot_address_)) =
1944         value->ptr();
1945   }
1946 
1947   translated_state_.VerifyMaterializedObjects();
1948 
1949   bool feedback_updated = translated_state_.DoUpdateFeedback();
1950   if (verbose_tracing_enabled() && feedback_updated) {
1951     FILE* file = trace_scope()->file();
1952     Deoptimizer::DeoptInfo info =
1953         Deoptimizer::GetDeoptInfo(compiled_code_, from_);
1954     PrintF(file, "Feedback updated from deoptimization at ");
1955     OFStream outstr(file);
1956     info.position.Print(outstr, compiled_code_);
1957     PrintF(file, ", %s\n", DeoptimizeReasonToString(info.deopt_reason));
1958   }
1959 
1960   isolate_->materialized_object_store()->Remove(
1961       static_cast<Address>(stack_fp_));
1962 }
1963 
1964 void Deoptimizer::QueueValueForMaterialization(
1965     Address output_address, Object obj,
1966     const TranslatedFrame::iterator& iterator) {
1967   if (obj == ReadOnlyRoots(isolate_).arguments_marker()) {
1968     values_to_materialize_.push_back({output_address, iterator});
1969   }
1970 }
1971 
1972 unsigned Deoptimizer::ComputeInputFrameAboveFpFixedSize() const {
1973   unsigned fixed_size = CommonFrameConstants::kFixedFrameSizeAboveFp;
1974   // TODO(jkummerow): If {function_->IsSmi()} can indeed be true, then
1975   // {function_} should not have type {JSFunction}.
1976   if (!function_.IsSmi()) {
1977     fixed_size += ComputeIncomingArgumentSize(function_.shared());
1978   }
1979   return fixed_size;
1980 }
1981 
1982 unsigned Deoptimizer::ComputeInputFrameSize() const {
1983   // The fp-to-sp delta already takes the context, constant pool pointer and the
1984   // function into account so we have to avoid double counting them.
1985   unsigned fixed_size_above_fp = ComputeInputFrameAboveFpFixedSize();
1986   unsigned result = fixed_size_above_fp + fp_to_sp_delta_;
1987   DCHECK(CodeKindCanDeoptimize(compiled_code_.kind()));
1988   unsigned stack_slots = compiled_code_.stack_slots();
1989   unsigned outgoing_size = 0;
1990   CHECK_EQ(fixed_size_above_fp + (stack_slots * kSystemPointerSize) -
1991                CommonFrameConstants::kFixedFrameSizeAboveFp + outgoing_size,
1992            result);
1993   return result;
1994 }
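// Equivalently, the CHECK above verifies the identity
//   fp_to_sp_delta_ == stack_slots * kSystemPointerSize -
//                      CommonFrameConstants::kFixedFrameSizeAboveFp
// (outgoing_size is always zero here): the fp-to-sp distance observed at the
// deopt site must match the stack-slot count the compiled code declares.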
1995 
1996 // static
1997 unsigned Deoptimizer::ComputeIncomingArgumentSize(SharedFunctionInfo shared) {
1998   int parameter_slots = shared.internal_formal_parameter_count_with_receiver();
1999   return parameter_slots * kSystemPointerSize;
2000 }
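// Example: for a function f(a, b), the parameter count including the receiver
// is 3, so on a 64-bit target (kSystemPointerSize == 8) the incoming
// arguments occupy 3 * 8 == 24 bytes.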
2001 
2002 Deoptimizer::DeoptInfo Deoptimizer::GetDeoptInfo(Code code, Address pc) {
2003   CHECK(code.InstructionStart() <= pc && pc <= code.InstructionEnd());
2004   SourcePosition last_position = SourcePosition::Unknown();
2005   DeoptimizeReason last_reason = DeoptimizeReason::kUnknown;
2006   uint32_t last_node_id = 0;
2007   int last_deopt_id = kNoDeoptimizationId;
2008   int mask = RelocInfo::ModeMask(RelocInfo::DEOPT_REASON) |
2009              RelocInfo::ModeMask(RelocInfo::DEOPT_ID) |
2010              RelocInfo::ModeMask(RelocInfo::DEOPT_SCRIPT_OFFSET) |
2011              RelocInfo::ModeMask(RelocInfo::DEOPT_INLINING_ID) |
2012              RelocInfo::ModeMask(RelocInfo::DEOPT_NODE_ID);
2013   for (RelocIterator it(code, mask); !it.done(); it.next()) {
2014     RelocInfo* info = it.rinfo();
2015     if (info->pc() >= pc) break;
2016     if (info->rmode() == RelocInfo::DEOPT_SCRIPT_OFFSET) {
2017       int script_offset = static_cast<int>(info->data());
2018       it.next();
2019       DCHECK(it.rinfo()->rmode() == RelocInfo::DEOPT_INLINING_ID);
2020       int inlining_id = static_cast<int>(it.rinfo()->data());
2021       last_position = SourcePosition(script_offset, inlining_id);
2022     } else if (info->rmode() == RelocInfo::DEOPT_ID) {
2023       last_deopt_id = static_cast<int>(info->data());
2024     } else if (info->rmode() == RelocInfo::DEOPT_REASON) {
2025       last_reason = static_cast<DeoptimizeReason>(info->data());
2026     } else if (info->rmode() == RelocInfo::DEOPT_NODE_ID) {
2027       last_node_id = static_cast<uint32_t>(info->data());
2028     }
2029   }
2030   return DeoptInfo(last_position, last_reason, last_node_id, last_deopt_id);
2031 }
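// Example (typical layout, not guaranteed): the reloc entries in front of a
// deopt site look roughly like
//   ..., DEOPT_SCRIPT_OFFSET, DEOPT_INLINING_ID, DEOPT_REASON, DEOPT_ID, <pc>
// and the loop above remembers the last value of each kind seen before {pc},
// so the returned DeoptInfo describes the nearest deopt site preceding the
// given address.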
2032 
2033 // static
2034 int Deoptimizer::ComputeSourcePositionFromBytecodeArray(
2035     Isolate* isolate, SharedFunctionInfo shared,
2036     BytecodeOffset bytecode_offset) {
2037   DCHECK(shared.HasBytecodeArray());
2038   return AbstractCode::cast(shared.GetBytecodeArray(isolate))
2039       .SourcePosition(bytecode_offset.ToInt());
2040 }
2041 
2042 }  // namespace internal
2043 }  // namespace v8
2044