• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/deoptimizer.h"
6 
7 #include <memory>
8 
9 #include "src/accessors.h"
10 #include "src/assembler-inl.h"
11 #include "src/ast/prettyprinter.h"
12 #include "src/callable.h"
13 #include "src/disasm.h"
14 #include "src/frames-inl.h"
15 #include "src/global-handles.h"
16 #include "src/interpreter/interpreter.h"
17 #include "src/macro-assembler.h"
18 #include "src/objects/debug-objects-inl.h"
19 #include "src/tracing/trace-event.h"
20 #include "src/v8.h"
21 
22 // Has to be the last include (doesn't have include guards)
23 #include "src/objects/object-macros.h"
24 
25 namespace v8 {
26 namespace internal {
27 
// {FrameWriter} offers a stack writer abstraction for writing
// FrameDescriptions. The main service the class provides is managing
// {top_offset_}, i.e. the offset of the next slot to write to.
class FrameWriter {
 public:
  // Marker used by callers for values that have no corresponding input slot.
  static const int NO_INPUT_INDEX = -1;

  // |trace_scope| may be null; when null no debug tracing is emitted.
  FrameWriter(Deoptimizer* deoptimizer, FrameDescription* frame,
              CodeTracer::Scope* trace_scope)
      : deoptimizer_(deoptimizer),
        frame_(frame),
        trace_scope_(trace_scope),
        // Slots are written top-down, so start just past the frame's end.
        top_offset_(frame->GetFrameSize()) {}

  // Writes a raw machine word into the next slot.
  void PushRawValue(intptr_t value, const char* debug_hint) {
    PushValue(value);

    if (trace_scope_ != nullptr) {
      DebugPrintOutputValue(value, debug_hint);
    }
  }

  // Writes a tagged object pointer into the next slot.
  void PushRawObject(Object* obj, const char* debug_hint) {
    intptr_t value = reinterpret_cast<intptr_t>(obj);
    PushValue(value);
    if (trace_scope_ != nullptr) {
      DebugPrintOutputObject(obj, top_offset_, debug_hint);
    }
  }

  // Writes the caller's return address into the dedicated PC slot.
  void PushCallerPc(intptr_t pc) {
    top_offset_ -= kPCOnStackSize;
    frame_->SetCallerPc(top_offset_, pc);
    DebugPrintOutputValue(pc, "caller's pc\n");
  }

  // Writes the caller's frame pointer into the dedicated FP slot.
  void PushCallerFp(intptr_t fp) {
    top_offset_ -= kFPOnStackSize;
    frame_->SetCallerFp(top_offset_, fp);
    DebugPrintOutputValue(fp, "caller's fp\n");
  }

  // Writes the caller's constant pool pointer (embedded-constant-pool builds).
  void PushCallerConstantPool(intptr_t cp) {
    top_offset_ -= kPointerSize;
    frame_->SetCallerConstantPool(top_offset_, cp);
    DebugPrintOutputValue(cp, "caller's constant_pool\n");
  }

  // Writes the value behind |iterator| and queues it with the deoptimizer so
  // it can be materialized later if needed.
  void PushTranslatedValue(const TranslatedFrame::iterator& iterator,
                           const char* debug_hint = "") {
    Object* obj = iterator->GetRawValue();

    PushRawObject(obj, debug_hint);

    if (trace_scope_) {
      PrintF(trace_scope_->file(), " (input #%d)\n", iterator.input_index());
    }

    deoptimizer_->QueueValueForMaterialization(output_address(top_offset_), obj,
                                               iterator);
  }

  // Offset of the next slot to write; counts down towards zero.
  unsigned top_offset() const { return top_offset_; }

 private:
  void PushValue(intptr_t value) {
    // NOTE(review): top_offset_ is unsigned, so this comparison is vacuously
    // true; presumably intended to guard against underflow — confirm intent.
    CHECK_GE(top_offset_, 0);
    top_offset_ -= kPointerSize;
    frame_->SetFrameSlot(top_offset_, value);
  }

  // Absolute address of the slot at |output_offset| in the materialized frame.
  Address output_address(unsigned output_offset) {
    Address output_address =
        static_cast<Address>(frame_->GetTop()) + output_offset;
    return output_address;
  }

  void DebugPrintOutputValue(intptr_t value, const char* debug_hint = "") {
    if (trace_scope_ != nullptr) {
      PrintF(trace_scope_->file(),
             "    " V8PRIxPTR_FMT ": [top + %3d] <- " V8PRIxPTR_FMT " ;  %s",
             output_address(top_offset_), top_offset_, value, debug_hint);
    }
  }

  void DebugPrintOutputObject(Object* obj, unsigned output_offset,
                              const char* debug_hint = "") {
    if (trace_scope_ != nullptr) {
      PrintF(trace_scope_->file(), "    " V8PRIxPTR_FMT ": [top + %3d] <- ",
             output_address(output_offset), output_offset);
      if (obj->IsSmi()) {
        // Smis are immediates; print inline rather than via ShortPrint.
        PrintF(V8PRIxPTR_FMT " <Smi %d>", reinterpret_cast<Address>(obj),
               Smi::cast(obj)->value());
      } else {
        obj->ShortPrint(trace_scope_->file());
      }
      PrintF(trace_scope_->file(), " ;  %s", debug_hint);
    }
  }

  Deoptimizer* deoptimizer_;
  FrameDescription* frame_;
  CodeTracer::Scope* trace_scope_;
  unsigned top_offset_;
};
132 
DeoptimizerData(Heap * heap)133 DeoptimizerData::DeoptimizerData(Heap* heap) : heap_(heap), current_(nullptr) {
134   for (int i = 0; i <= DeoptimizerData::kLastDeoptimizeKind; ++i) {
135     deopt_entry_code_[i] = nullptr;
136   }
137   Code** start = &deopt_entry_code_[0];
138   Code** end = &deopt_entry_code_[DeoptimizerData::kLastDeoptimizeKind + 1];
139   heap_->RegisterStrongRoots(reinterpret_cast<Object**>(start),
140                              reinterpret_cast<Object**>(end));
141 }
142 
143 
~DeoptimizerData()144 DeoptimizerData::~DeoptimizerData() {
145   for (int i = 0; i <= DeoptimizerData::kLastDeoptimizeKind; ++i) {
146     deopt_entry_code_[i] = nullptr;
147   }
148   Code** start = &deopt_entry_code_[0];
149   heap_->UnregisterStrongRoots(reinterpret_cast<Object**>(start));
150 }
151 
deopt_entry_code(DeoptimizeKind kind)152 Code* DeoptimizerData::deopt_entry_code(DeoptimizeKind kind) {
153   return deopt_entry_code_[static_cast<int>(kind)];
154 }
155 
set_deopt_entry_code(DeoptimizeKind kind,Code * code)156 void DeoptimizerData::set_deopt_entry_code(DeoptimizeKind kind, Code* code) {
157   deopt_entry_code_[static_cast<int>(kind)] = code;
158 }
159 
// Walks the deoptimized-code list of the function's native context and
// returns the Code object containing |addr|, or nullptr if no such code
// exists (or function_ is not a heap object).
Code* Deoptimizer::FindDeoptimizingCode(Address addr) {
  if (function_->IsHeapObject()) {
    // Search all deoptimizing code in the native context of the function.
    Isolate* isolate = isolate_;
    Context* native_context = function_->context()->native_context();
    Object* element = native_context->DeoptimizedCodeListHead();
    while (!element->IsUndefined(isolate)) {
      Code* code = Code::cast(element);
      CHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
      if (code->contains(addr)) return code;
      element = code->next_code_link();
    }
  }
  return nullptr;
}
175 
176 
177 // We rely on this function not causing a GC.  It is called from generated code
178 // without having a real stack frame in place.
New(JSFunction * function,DeoptimizeKind kind,unsigned bailout_id,Address from,int fp_to_sp_delta,Isolate * isolate)179 Deoptimizer* Deoptimizer::New(JSFunction* function, DeoptimizeKind kind,
180                               unsigned bailout_id, Address from,
181                               int fp_to_sp_delta, Isolate* isolate) {
182   Deoptimizer* deoptimizer = new Deoptimizer(isolate, function, kind,
183                                              bailout_id, from, fp_to_sp_delta);
184   CHECK_NULL(isolate->deoptimizer_data()->current_);
185   isolate->deoptimizer_data()->current_ = deoptimizer;
186   return deoptimizer;
187 }
188 
189 
Grab(Isolate * isolate)190 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
191   Deoptimizer* result = isolate->deoptimizer_data()->current_;
192   CHECK_NOT_NULL(result);
193   result->DeleteFrameDescriptions();
194   isolate->deoptimizer_data()->current_ = nullptr;
195   return result;
196 }
197 
// Builds a DeoptimizedFrameInfo for the |jsframe_index|-th JavaScript-visible
// frame reconstructed from the optimized |frame|, for debugger inspection.
// The caller takes ownership of the returned object.
DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
    JavaScriptFrame* frame,
    int jsframe_index,
    Isolate* isolate) {
  CHECK(frame->is_optimized());

  TranslatedState translated_values(frame);
  translated_values.Prepare(frame->fp());

  // Locate the jsframe_index-th translated frame that corresponds to a
  // JavaScript-level frame.
  TranslatedState::iterator frame_it = translated_values.end();
  int counter = jsframe_index;
  for (auto it = translated_values.begin(); it != translated_values.end();
       it++) {
    if (it->kind() == TranslatedFrame::kInterpretedFunction ||
        it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
        it->kind() ==
            TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
      if (counter == 0) {
        frame_it = it;
        break;
      }
      counter--;
    }
  }
  CHECK(frame_it != translated_values.end());
  // We only include kJavaScriptBuiltinContinuation frames above to get the
  // counting right.
  CHECK_EQ(frame_it->kind(), TranslatedFrame::kInterpretedFunction);

  DeoptimizedFrameInfo* info =
      new DeoptimizedFrameInfo(&translated_values, frame_it, isolate);

  return info;
}
232 
GenerateDeoptimizationEntries(MacroAssembler * masm,int count,DeoptimizeKind kind)233 void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm, int count,
234                                                 DeoptimizeKind kind) {
235   NoRootArrayScope no_root_array(masm);
236   TableEntryGenerator generator(masm, kind, count);
237   generator.Generate();
238 }
239 
namespace {
// Thread visitor that redirects the pc of every live activation of code
// marked for deoptimization to that code's deopt trampoline.
class ActivationsFinder : public ThreadVisitor {
 public:
  explicit ActivationsFinder(std::set<Code*>* codes,
                             Code* topmost_optimized_code,
                             bool safe_to_deopt_topmost_optimized_code)
      : codes_(codes) {
#ifdef DEBUG
    topmost_ = topmost_optimized_code;
    safe_to_deopt_ = safe_to_deopt_topmost_optimized_code;
#endif
  }

  // Find the frames with activations of codes marked for deoptimization, search
  // for the trampoline to the deoptimizer call respective to each code, and use
  // it to replace the current pc on the stack.
  void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
    for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
      if (it.frame()->type() == StackFrame::OPTIMIZED) {
        Code* code = it.frame()->LookupCode();
        if (code->kind() == Code::OPTIMIZED_FUNCTION &&
            code->marked_for_deoptimization()) {
          // Code found on a stack is still live: remove it from |codes_| so
          // the caller does not invalidate its deoptimization data.
          codes_->erase(code);
          // Obtain the trampoline to the deoptimizer call.
          SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
          int trampoline_pc = safepoint.trampoline_pc();
          DCHECK_IMPLIES(code == topmost_, safe_to_deopt_);
          // Replace the current pc on the stack with the trampoline.
          it.frame()->set_pc(code->raw_instruction_start() + trampoline_pc);
        }
      }
    }
  }

 private:
  // Marked Code objects not yet found in any frame; owned by the caller.
  std::set<Code*>* codes_;

#ifdef DEBUG
  Code* topmost_;
  bool safe_to_deopt_;
#endif
};
}  // namespace
283 
// Move marked code from the optimized code list to the deoptimized code list,
// and replace pc on the stack for codes marked for deoptimization.
void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
  DisallowHeapAllocation no_allocation;

  Isolate* isolate = context->GetHeap()->isolate();
  Code* topmost_optimized_code = nullptr;
  bool safe_to_deopt_topmost_optimized_code = false;
#ifdef DEBUG
  // Make sure all activations of optimized code can deopt at their current PC.
  // The topmost optimized code has special handling because it cannot be
  // deoptimized due to weak object dependency.
  for (StackFrameIterator it(isolate, isolate->thread_local_top());
       !it.done(); it.Advance()) {
    StackFrame::Type type = it.frame()->type();
    if (type == StackFrame::OPTIMIZED) {
      Code* code = it.frame()->LookupCode();
      JSFunction* function =
          static_cast<OptimizedFrame*>(it.frame())->function();
      if (FLAG_trace_deopt) {
        CodeTracer::Scope scope(isolate->GetCodeTracer());
        PrintF(scope.file(), "[deoptimizer found activation of function: ");
        function->PrintName(scope.file());
        PrintF(scope.file(),
               " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
      }
      SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
      int deopt_index = safepoint.deoptimization_index();

      // Turbofan deopt is checked when we are patching addresses on stack.
      bool safe_if_deopt_triggered =
          deopt_index != Safepoint::kNoDeoptimizationIndex;
      bool is_builtin_code = code->kind() == Code::BUILTIN;
      DCHECK(topmost_optimized_code == nullptr || safe_if_deopt_triggered ||
             is_builtin_code);
      if (topmost_optimized_code == nullptr) {
        topmost_optimized_code = code;
        safe_to_deopt_topmost_optimized_code = safe_if_deopt_triggered;
      }
    }
  }
#endif

  // We will use this set to mark those Code objects that are marked for
  // deoptimization and have not been found in stack frames.
  std::set<Code*> codes;

  // Move marked code from the optimized code list to the deoptimized code list.
  // Walk over all optimized code objects in this native context.
  Code* prev = nullptr;
  Object* element = context->OptimizedCodeListHead();
  while (!element->IsUndefined(isolate)) {
    Code* code = Code::cast(element);
    CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
    Object* next = code->next_code_link();

    if (code->marked_for_deoptimization()) {
      // Make sure that this object does not point to any garbage.
      isolate->heap()->InvalidateCodeEmbeddedObjects(code);
      codes.insert(code);

      if (prev != nullptr) {
        // Skip this code in the optimized code list.
        prev->set_next_code_link(next);
      } else {
        // There was no previous node, the next node is the new head.
        context->SetOptimizedCodeListHead(next);
      }

      // Move the code to the _deoptimized_ code list.
      code->set_next_code_link(context->DeoptimizedCodeListHead());
      context->SetDeoptimizedCodeListHead(code);
    } else {
      // Not marked; preserve this element.
      prev = code;
    }
    element = next;
  }

  // Patch the return pc of every live activation of marked code; each code
  // found on a stack is removed from |codes| by the visitor.
  ActivationsFinder visitor(&codes, topmost_optimized_code,
                            safe_to_deopt_topmost_optimized_code);
  // Iterate over the stack of this thread.
  visitor.VisitThread(isolate, isolate->thread_local_top());
  // In addition to iterate over the stack of this thread, we also
  // need to consider all the other threads as they may also use
  // the code currently beings deoptimized.
  isolate->thread_manager()->IterateArchivedThreads(&visitor);

  // If there's no activation of a code in any stack then we can remove its
  // deoptimization data. We do this to ensure that code objects that are
  // unlinked don't transitively keep objects alive unnecessarily.
  for (Code* code : codes) {
    isolate->heap()->InvalidateCodeDeoptimizationData(code);
  }
}
379 
380 
DeoptimizeAll(Isolate * isolate)381 void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
382   RuntimeCallTimerScope runtimeTimer(isolate,
383                                      RuntimeCallCounterId::kDeoptimizeCode);
384   TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
385   TRACE_EVENT0("v8", "V8.DeoptimizeCode");
386   if (FLAG_trace_deopt) {
387     CodeTracer::Scope scope(isolate->GetCodeTracer());
388     PrintF(scope.file(), "[deoptimize all code in all contexts]\n");
389   }
390   isolate->AbortConcurrentOptimization(BlockingBehavior::kBlock);
391   DisallowHeapAllocation no_allocation;
392   // For all contexts, mark all code, then deoptimize.
393   Object* context = isolate->heap()->native_contexts_list();
394   while (!context->IsUndefined(isolate)) {
395     Context* native_context = Context::cast(context);
396     MarkAllCodeForContext(native_context);
397     DeoptimizeMarkedCodeForContext(native_context);
398     context = native_context->next_context_link();
399   }
400 }
401 
402 
DeoptimizeMarkedCode(Isolate * isolate)403 void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
404   RuntimeCallTimerScope runtimeTimer(isolate,
405                                      RuntimeCallCounterId::kDeoptimizeCode);
406   TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
407   TRACE_EVENT0("v8", "V8.DeoptimizeCode");
408   if (FLAG_trace_deopt) {
409     CodeTracer::Scope scope(isolate->GetCodeTracer());
410     PrintF(scope.file(), "[deoptimize marked code in all contexts]\n");
411   }
412   DisallowHeapAllocation no_allocation;
413   // For all contexts, deoptimize code already marked.
414   Object* context = isolate->heap()->native_contexts_list();
415   while (!context->IsUndefined(isolate)) {
416     Context* native_context = Context::cast(context);
417     DeoptimizeMarkedCodeForContext(native_context);
418     context = native_context->next_context_link();
419   }
420 }
421 
MarkAllCodeForContext(Context * context)422 void Deoptimizer::MarkAllCodeForContext(Context* context) {
423   Object* element = context->OptimizedCodeListHead();
424   Isolate* isolate = context->GetIsolate();
425   while (!element->IsUndefined(isolate)) {
426     Code* code = Code::cast(element);
427     CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
428     code->set_marked_for_deoptimization(true);
429     element = code->next_code_link();
430   }
431 }
432 
// Marks |code| (or |function|'s current code when |code| is null) for
// deoptimization, evicts it from the feedback vector if present, bumps the
// deopt count once, and deoptimizes live activations in the function's
// native context. No-op unless the code is OPTIMIZED_FUNCTION.
void Deoptimizer::DeoptimizeFunction(JSFunction* function, Code* code) {
  Isolate* isolate = function->GetIsolate();
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     RuntimeCallCounterId::kDeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  if (code == nullptr) code = function->code();

  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    // Mark the code for deoptimization and unlink any functions that also
    // refer to that code. The code cannot be shared across native contexts,
    // so we only need to search one.
    code->set_marked_for_deoptimization(true);
    // The code in the function's optimized code feedback vector slot might
    // be different from the code on the function - evict it if necessary.
    function->feedback_vector()->EvictOptimizedCodeMarkedForDeoptimization(
        function->shared(), "unlinking code marked for deopt");
    if (!code->deopt_already_counted()) {
      // Count this deopt only once per code object.
      function->feedback_vector()->increment_deopt_count();
      code->set_deopt_already_counted(true);
    }
    DeoptimizeMarkedCodeForContext(function->context()->native_context());
  }
}
457 
458 
// Static entry point invoked by the deopt entry stubs to fill in the output
// frames of the active |deoptimizer|.
void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
  deoptimizer->DoComputeOutputFrames();
}
462 
// Human-readable name for |kind|, used in trace output.
const char* Deoptimizer::MessageFor(DeoptimizeKind kind) {
  switch (kind) {
    case DeoptimizeKind::kEager:
      return "eager";
    case DeoptimizeKind::kSoft:
      return "soft";
    case DeoptimizeKind::kLazy:
      return "lazy";
  }
  FATAL("Unsupported deopt kind");
  return nullptr;  // Unreachable: FATAL aborts; kept to satisfy the compiler.
}
475 
// Gathers everything needed to materialize unoptimized frames: the optimized
// code being abandoned, tracing state, deopt counting, and an input
// FrameDescription sized to the optimized frame.
Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction* function,
                         DeoptimizeKind kind, unsigned bailout_id, Address from,
                         int fp_to_sp_delta)
    : isolate_(isolate),
      function_(function),
      bailout_id_(bailout_id),
      deopt_kind_(kind),
      from_(from),
      fp_to_sp_delta_(fp_to_sp_delta),
      deoptimizing_throw_(false),
      catch_handler_data_(-1),
      catch_handler_pc_offset_(-1),
      input_(nullptr),
      output_count_(0),
      jsframe_count_(0),
      output_(nullptr),
      caller_frame_top_(0),
      caller_fp_(0),
      caller_pc_(0),
      caller_constant_pool_(0),
      input_frame_context_(0),
      stack_fp_(0),
      trace_scope_(nullptr) {
  // A pending "lazy deopt for throw" is a one-shot flag on the isolate;
  // consume it here.
  if (isolate->deoptimizer_lazy_throw()) {
    isolate->set_deoptimizer_lazy_throw(false);
    deoptimizing_throw_ = true;
  }

  DCHECK_NE(from, kNullAddress);
  compiled_code_ = FindOptimizedCode();
  DCHECK_NOT_NULL(compiled_code_);

  DCHECK(function->IsJSFunction());
  trace_scope_ = FLAG_trace_deopt
                     ? new CodeTracer::Scope(isolate->GetCodeTracer())
                     : nullptr;
#ifdef DEBUG
  // Freed again in DeleteFrameDescriptions(); no heap allocation may happen
  // while the deoptimizer is live.
  DCHECK(AllowHeapAllocation::IsAllowed());
  disallow_heap_allocation_ = new DisallowHeapAllocation();
#endif  // DEBUG
  if (compiled_code_->kind() != Code::OPTIMIZED_FUNCTION ||
      !compiled_code_->deopt_already_counted()) {
    // If the function is optimized, and we haven't counted that deopt yet, then
    // increment the function's deopt count so that we can avoid optimising
    // functions that deopt too often.

    if (deopt_kind_ == DeoptimizeKind::kSoft) {
      // Soft deopts shouldn't count against the overall deoptimization count
      // that can eventually lead to disabling optimization for a function.
      isolate->counters()->soft_deopts_executed()->Increment();
    } else if (function != nullptr) {
      function->feedback_vector()->increment_deopt_count();
    }
  }
  if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
    compiled_code_->set_deopt_already_counted(true);
    PROFILE(isolate_,
            CodeDeoptEvent(compiled_code_, kind, from_, fp_to_sp_delta_));
  }
  // The input frame mirrors the optimized frame being torn down; parameter
  // count includes the receiver (+1).
  unsigned size = ComputeInputFrameSize();
  int parameter_count =
      function->shared()->internal_formal_parameter_count() + 1;
  input_ = new (size) FrameDescription(size, parameter_count);
}
540 
FindOptimizedCode()541 Code* Deoptimizer::FindOptimizedCode() {
542   Code* compiled_code = FindDeoptimizingCode(from_);
543   return (compiled_code == nullptr)
544              ? static_cast<Code*>(isolate_->FindCodeObject(from_))
545              : compiled_code;
546 }
547 
548 
PrintFunctionName()549 void Deoptimizer::PrintFunctionName() {
550   if (function_->IsHeapObject() && function_->IsJSFunction()) {
551     function_->ShortPrint(trace_scope_->file());
552   } else {
553     PrintF(trace_scope_->file(),
554            "%s", Code::Kind2String(compiled_code_->kind()));
555   }
556 }
557 
// Returns the function being deoptimized, wrapped in a handle.
Handle<JSFunction> Deoptimizer::function() const {
  return Handle<JSFunction>(function_, isolate());
}
// Returns the optimized code being deoptimized, wrapped in a handle.
Handle<Code> Deoptimizer::compiled_code() const {
  return Handle<Code>(compiled_code_, isolate());
}
564 
// Frame descriptions and the DEBUG-only allocation ban must already have
// been released via DeleteFrameDescriptions(); only the owned trace scope
// remains to free.
Deoptimizer::~Deoptimizer() {
  DCHECK(input_ == nullptr && output_ == nullptr);
  DCHECK_NULL(disallow_heap_allocation_);
  delete trace_scope_;
}
570 
571 
// Releases the input and output FrameDescriptions. The input may alias one
// output slot, so it is deleted exactly once. Also lifts the DEBUG-only
// heap-allocation ban installed by the constructor.
void Deoptimizer::DeleteFrameDescriptions() {
  delete input_;
  for (int i = 0; i < output_count_; ++i) {
    // Skip the slot that aliases input_; it was deleted above.
    if (output_[i] != input_) delete output_[i];
  }
  delete[] output_;
  input_ = nullptr;
  output_ = nullptr;
#ifdef DEBUG
  DCHECK(!AllowHeapAllocation::IsAllowed());
  DCHECK_NOT_NULL(disallow_heap_allocation_);
  delete disallow_heap_allocation_;
  disallow_heap_allocation_ = nullptr;
#endif  // DEBUG
}
587 
GetDeoptimizationEntry(Isolate * isolate,int id,DeoptimizeKind kind)588 Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate, int id,
589                                             DeoptimizeKind kind) {
590   CHECK_GE(id, 0);
591   if (id >= kMaxNumberOfEntries) return kNullAddress;
592   DeoptimizerData* data = isolate->deoptimizer_data();
593   CHECK_LE(kind, DeoptimizerData::kLastDeoptimizeKind);
594   CHECK_NOT_NULL(data->deopt_entry_code(kind));
595   Code* code = data->deopt_entry_code(kind);
596   return code->raw_instruction_start() + (id * table_entry_size_);
597 }
598 
GetDeoptimizationId(Isolate * isolate,Address addr,DeoptimizeKind kind)599 int Deoptimizer::GetDeoptimizationId(Isolate* isolate, Address addr,
600                                      DeoptimizeKind kind) {
601   DeoptimizerData* data = isolate->deoptimizer_data();
602   CHECK_LE(kind, DeoptimizerData::kLastDeoptimizeKind);
603   DCHECK(IsInDeoptimizationTable(isolate, addr, kind));
604   Code* code = data->deopt_entry_code(kind);
605   Address start = code->raw_instruction_start();
606   DCHECK_EQ(0,
607             static_cast<int>(addr - start) % table_entry_size_);
608   return static_cast<int>(addr - start) / table_entry_size_;
609 }
610 
// Returns true if |addr| lies within the generated deopt entry table for
// |type|. When table_entry_size_ is zero only an exact match with the table
// start counts.
bool Deoptimizer::IsInDeoptimizationTable(Isolate* isolate, Address addr,
                                          DeoptimizeKind type) {
  DeoptimizerData* data = isolate->deoptimizer_data();
  CHECK_LE(type, DeoptimizerData::kLastDeoptimizeKind);
  Code* code = data->deopt_entry_code(type);
  // The table for this kind may not have been generated yet.
  if (code == nullptr) return false;
  Address start = code->raw_instruction_start();
  return ((table_entry_size_ == 0 && addr == start) ||
          (addr >= start &&
           addr < start + (kMaxNumberOfEntries * table_entry_size_)));
}
622 
IsDeoptimizationEntry(Isolate * isolate,Address addr,DeoptimizeKind * type)623 bool Deoptimizer::IsDeoptimizationEntry(Isolate* isolate, Address addr,
624                                         DeoptimizeKind* type) {
625   if (IsInDeoptimizationTable(isolate, addr, DeoptimizeKind::kEager)) {
626     *type = DeoptimizeKind::kEager;
627     return true;
628   }
629   if (IsInDeoptimizationTable(isolate, addr, DeoptimizeKind::kSoft)) {
630     *type = DeoptimizeKind::kSoft;
631     return true;
632   }
633   if (IsInDeoptimizationTable(isolate, addr, DeoptimizeKind::kLazy)) {
634     *type = DeoptimizeKind::kLazy;
635     return true;
636   }
637   return false;
638 }
639 
GetDeoptimizedCodeCount(Isolate * isolate)640 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
641   int length = 0;
642   // Count all entries in the deoptimizing code list of every context.
643   Object* context = isolate->heap()->native_contexts_list();
644   while (!context->IsUndefined(isolate)) {
645     Context* native_context = Context::cast(context);
646     Object* element = native_context->DeoptimizedCodeListHead();
647     while (!element->IsUndefined(isolate)) {
648       Code* code = Code::cast(element);
649       DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
650       if (!code->marked_for_deoptimization()) {
651         length++;
652       }
653       element = code->next_code_link();
654     }
655     context = Context::cast(context)->next_context_link();
656   }
657   return length;
658 }
659 
namespace {

// Returns the handler-table result for the catch handler covering the current
// bytecode offset of |translated_frame| (handler data stored in |data_out|),
// 0 for builtin-continuation-with-catch frames (the continuation itself is
// the handler), or -1 when the frame kind cannot catch.
int LookupCatchHandler(TranslatedFrame* translated_frame, int* data_out) {
  switch (translated_frame->kind()) {
    case TranslatedFrame::kInterpretedFunction: {
      int bytecode_offset = translated_frame->node_id().ToInt();
      HandlerTable table(
          translated_frame->raw_shared_info()->GetBytecodeArray());
      return table.LookupRange(bytecode_offset, data_out, nullptr);
    }
    case TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch: {
      return 0;
    }
    default:
      break;
  }
  return -1;
}

// On targets that require stack alignment (kPadArguments), an odd argument
// count needs one extra padding slot.
bool ShouldPadArguments(int arg_count) {
  return kPadArguments && (arg_count % 2 != 0);
}

}  // namespace
684 
685 // We rely on this function not causing a GC.  It is called from generated code
686 // without having a real stack frame in place.
DoComputeOutputFrames()687 void Deoptimizer::DoComputeOutputFrames() {
688   base::ElapsedTimer timer;
689 
690   // Determine basic deoptimization information.  The optimized frame is
691   // described by the input data.
692   DeoptimizationData* input_data =
693       DeoptimizationData::cast(compiled_code_->deoptimization_data());
694 
695   {
696     // Read caller's PC, caller's FP and caller's constant pool values
697     // from input frame. Compute caller's frame top address.
698 
699     Register fp_reg = JavaScriptFrame::fp_register();
700     stack_fp_ = input_->GetRegister(fp_reg.code());
701 
702     caller_frame_top_ = stack_fp_ + ComputeInputFrameAboveFpFixedSize();
703 
704     Address fp_address = input_->GetFramePointerAddress();
705     caller_fp_ = Memory<intptr_t>(fp_address);
706     caller_pc_ =
707         Memory<intptr_t>(fp_address + CommonFrameConstants::kCallerPCOffset);
708     input_frame_context_ = Memory<intptr_t>(
709         fp_address + CommonFrameConstants::kContextOrFrameTypeOffset);
710 
711     if (FLAG_enable_embedded_constant_pool) {
712       caller_constant_pool_ = Memory<intptr_t>(
713           fp_address + CommonFrameConstants::kConstantPoolOffset);
714     }
715   }
716 
717   if (trace_scope_ != nullptr) {
718     timer.Start();
719     PrintF(trace_scope_->file(), "[deoptimizing (DEOPT %s): begin ",
720            MessageFor(deopt_kind_));
721     PrintFunctionName();
722     PrintF(trace_scope_->file(),
723            " (opt #%d) @%d, FP to SP delta: %d, caller sp: " V8PRIxPTR_FMT
724            "]\n",
725            input_data->OptimizationId()->value(), bailout_id_, fp_to_sp_delta_,
726            caller_frame_top_);
727     if (deopt_kind_ == DeoptimizeKind::kEager ||
728         deopt_kind_ == DeoptimizeKind::kSoft) {
729       compiled_code_->PrintDeoptLocation(
730           trace_scope_->file(), "            ;;; deoptimize at ", from_);
731     }
732   }
733 
734   BailoutId node_id = input_data->BytecodeOffset(bailout_id_);
735   ByteArray* translations = input_data->TranslationByteArray();
736   unsigned translation_index =
737       input_data->TranslationIndex(bailout_id_)->value();
738 
739   TranslationIterator state_iterator(translations, translation_index);
740   translated_state_.Init(
741       isolate_, input_->GetFramePointerAddress(), &state_iterator,
742       input_data->LiteralArray(), input_->GetRegisterValues(),
743       trace_scope_ == nullptr ? nullptr : trace_scope_->file(),
744       function_->IsHeapObject()
745           ? function_->shared()->internal_formal_parameter_count()
746           : 0);
747 
748   // Do the input frame to output frame(s) translation.
749   size_t count = translated_state_.frames().size();
750   // If we are supposed to go to the catch handler, find the catching frame
  // for the catch and make sure we only deoptimize up to that frame.
752   if (deoptimizing_throw_) {
753     size_t catch_handler_frame_index = count;
754     for (size_t i = count; i-- > 0;) {
755       catch_handler_pc_offset_ = LookupCatchHandler(
756           &(translated_state_.frames()[i]), &catch_handler_data_);
757       if (catch_handler_pc_offset_ >= 0) {
758         catch_handler_frame_index = i;
759         break;
760       }
761     }
762     CHECK_LT(catch_handler_frame_index, count);
763     count = catch_handler_frame_index + 1;
764   }
765 
766   DCHECK_NULL(output_);
767   output_ = new FrameDescription*[count];
768   for (size_t i = 0; i < count; ++i) {
769     output_[i] = nullptr;
770   }
771   output_count_ = static_cast<int>(count);
772 
773   // Translate each output frame.
774   int frame_index = 0;  // output_frame_index
775   for (size_t i = 0; i < count; ++i, ++frame_index) {
776     // Read the ast node id, function, and frame height for this output frame.
777     TranslatedFrame* translated_frame = &(translated_state_.frames()[i]);
778     bool handle_exception = deoptimizing_throw_ && i == count - 1;
779     switch (translated_frame->kind()) {
780       case TranslatedFrame::kInterpretedFunction:
781         DoComputeInterpretedFrame(translated_frame, frame_index,
782                                   handle_exception);
783         jsframe_count_++;
784         break;
785       case TranslatedFrame::kArgumentsAdaptor:
786         DoComputeArgumentsAdaptorFrame(translated_frame, frame_index);
787         break;
788       case TranslatedFrame::kConstructStub:
789         DoComputeConstructStubFrame(translated_frame, frame_index);
790         break;
791       case TranslatedFrame::kBuiltinContinuation:
792         DoComputeBuiltinContinuation(translated_frame, frame_index,
793                                      BuiltinContinuationMode::STUB);
794         break;
795       case TranslatedFrame::kJavaScriptBuiltinContinuation:
796         DoComputeBuiltinContinuation(translated_frame, frame_index,
797                                      BuiltinContinuationMode::JAVASCRIPT);
798         break;
799       case TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch:
800         DoComputeBuiltinContinuation(
801             translated_frame, frame_index,
802             handle_exception
803                 ? BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION
804                 : BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH);
805         break;
806       case TranslatedFrame::kInvalid:
807         FATAL("invalid frame");
808         break;
809     }
810   }
811 
812   // Print some helpful diagnostic information.
813   if (trace_scope_ != nullptr) {
814     double ms = timer.Elapsed().InMillisecondsF();
815     int index = output_count_ - 1;  // Index of the topmost frame.
816     PrintF(trace_scope_->file(), "[deoptimizing (%s): end ",
817            MessageFor(deopt_kind_));
818     PrintFunctionName();
819     PrintF(trace_scope_->file(),
820            " @%d => node=%d, pc=" V8PRIxPTR_FMT ", caller sp=" V8PRIxPTR_FMT
821            ", took %0.3f ms]\n",
822            bailout_id_, node_id.ToInt(), output_[index]->GetPc(),
823            caller_frame_top_, ms);
824   }
825 }
826 
// Builds the output FrameDescription for a single interpreted (bytecode)
// frame. The frame is written top-down via {FrameWriter}: stack parameters
// first, then the fixed header (caller PC/FP, optional constant pool,
// context, function, bytecode array, bytecode offset), then the interpreter
// register file and — for the topmost frame only — the accumulator. When
// {goto_catch_handler} is set, the frame is redirected to the catch handler
// located earlier (see {catch_handler_pc_offset_} / {catch_handler_data_}).
void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
                                            int frame_index,
                                            bool goto_catch_handler) {
  SharedFunctionInfo* shared = translated_frame->raw_shared_info();

  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);

  int bytecode_offset = translated_frame->node_id().ToInt();
  int height = translated_frame->height();
  int register_count = height - 1;  // Exclude accumulator.
  // Some architectures round the register area up; see the padding loop below.
  int register_stack_slot_count =
      InterpreterFrameConstants::RegisterStackSlotCount(register_count);
  int height_in_bytes = register_stack_slot_count * kPointerSize;

  // The topmost frame will contain the accumulator.
  if (is_topmost) {
    height_in_bytes += kPointerSize;
    if (PadTopOfStackRegister()) height_in_bytes += kPointerSize;
  }

  // The first translated value is the function; keep an iterator to it so it
  // can be pushed into the frame header further down.
  TranslatedFrame::iterator function_iterator = value_iterator++;
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "  translating interpreted frame ");
    std::unique_ptr<char[]> name = shared->DebugName()->ToCString();
    PrintF(trace_scope_->file(), "%s", name.get());
    PrintF(trace_scope_->file(), " => bytecode_offset=%d, height=%d%s\n",
           bytecode_offset, height_in_bytes,
           goto_catch_handler ? " (throw)" : "");
  }
  if (goto_catch_handler) {
    // Resume at the catch handler instead of at the deopt point.
    bytecode_offset = catch_handler_pc_offset_;
  }

  // The 'fixed' part of the frame consists of the incoming parameters and
  // the part described by InterpreterFrameConstants. This will include
  // argument padding, when needed.
  unsigned fixed_frame_size = ComputeInterpretedFixedSize(shared);
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  // +1: presumably the receiver in addition to the formal parameters —
  // matches the stack-parameter push loop below.
  int parameter_count = shared->internal_formal_parameter_count() + 1;
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, parameter_count);
  FrameWriter frame_writer(this, output_frame, trace_scope_);

  CHECK(frame_index >= 0 && frame_index < output_count_);
  CHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    top_address = caller_frame_top_ - output_frame_size;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.

  ReadOnlyRoots roots(isolate());
  if (ShouldPadArguments(parameter_count)) {
    frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
  }

  for (int i = 0; i < parameter_count; ++i, ++value_iterator) {
    frame_writer.PushTranslatedValue(value_iterator, "stack parameter");
  }

  DCHECK_EQ(output_frame->GetLastArgumentSlotOffset(),
            frame_writer.top_offset());
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "    -------------------------\n");
  }

  // There are no translation commands for the caller's pc and fp, the
  // context, the function and the bytecode offset.  Synthesize their values
  // and set them up explicitly.
  //
  // The caller's pc for the bottommost output frame is the same as in the
  // input frame.  For all subsequent output frames, it can be read from the
  // previous one.  This frame's pc can be computed from the non-optimized
  // function code and AST id of the bailout.
  const intptr_t caller_pc =
      is_bottommost ? caller_pc_ : output_[frame_index - 1]->GetPc();
  frame_writer.PushCallerPc(caller_pc);

  // The caller's frame pointer for the bottommost output frame is the same
  // as in the input frame.  For all subsequent output frames, it can be
  // read from the previous one.  Also compute and set this frame's frame
  // pointer.
  const intptr_t caller_fp =
      is_bottommost ? caller_fp_ : output_[frame_index - 1]->GetFp();
  frame_writer.PushCallerFp(caller_fp);

  intptr_t fp_value = top_address + frame_writer.top_offset();
  output_frame->SetFp(fp_value);
  if (is_topmost) {
    // The topmost frame's FP must also be materialized into the register
    // state the continuation restores.
    Register fp_reg = InterpretedFrame::fp_register();
    output_frame->SetRegister(fp_reg.code(), fp_value);
  }

  if (FLAG_enable_embedded_constant_pool) {
    // For the bottommost output frame the constant pool pointer can be gotten
    // from the input frame. For subsequent output frames, it can be read from
    // the previous frame.
    const intptr_t caller_cp =
        is_bottommost ? caller_constant_pool_
                      : output_[frame_index - 1]->GetConstantPool();
    frame_writer.PushCallerConstantPool(caller_cp);
  }

  // For the bottommost output frame the context can be gotten from the input
  // frame. For all subsequent output frames it can be gotten from the function
  // so long as we don't inline functions that need local contexts.

  // When deoptimizing into a catch block, we need to take the context
  // from a register that was specified in the handler table.
  TranslatedFrame::iterator context_pos = value_iterator++;
  if (goto_catch_handler) {
    // Skip to the translated value of the register specified
    // in the handler table ({catch_handler_data_} holds the register index).
    for (int i = 0; i < catch_handler_data_ + 1; ++i) {
      context_pos++;
    }
  }
  // Read the context from the translations.
  Object* context = context_pos->GetRawValue();
  output_frame->SetContext(reinterpret_cast<intptr_t>(context));
  frame_writer.PushTranslatedValue(context_pos, "context\n");

  // The function was mentioned explicitly in the BEGIN_FRAME.
  frame_writer.PushTranslatedValue(function_iterator, "function\n");

  // Set the bytecode array pointer.  Prefer the debug copy when the function
  // is being debugged (break points patch the bytecode).
  Object* bytecode_array = shared->HasBreakInfo()
                               ? shared->GetDebugInfo()->DebugBytecodeArray()
                               : shared->GetBytecodeArray();
  frame_writer.PushRawObject(bytecode_array, "bytecode array\n");

  // The bytecode offset was mentioned explicitly in the BEGIN_FRAME.
  // It is stored biased by the array header so it can live in a Smi slot.
  int raw_bytecode_offset =
      BytecodeArray::kHeaderSize - kHeapObjectTag + bytecode_offset;
  Smi* smi_bytecode_offset = Smi::FromInt(raw_bytecode_offset);
  frame_writer.PushRawObject(smi_bytecode_offset, "bytecode offset\n");

  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "    -------------------------\n");
  }

  // Translate the rest of the interpreter registers in the frame.
  for (int i = 0; i < register_count; ++i, ++value_iterator) {
    frame_writer.PushTranslatedValue(value_iterator, "stack parameter");
  }

  int register_slots_written = register_count;
  DCHECK_LE(register_slots_written, register_stack_slot_count);
  // Some architectures must pad the stack frame with extra stack slots
  // to ensure the stack frame is aligned. Do this now.
  while (register_slots_written < register_stack_slot_count) {
    register_slots_written++;
    frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
  }

  // Translate the accumulator register (depending on frame position).
  if (is_topmost) {
    if (PadTopOfStackRegister()) {
      frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
    }
    // For topmost frame, put the accumulator on the stack. The
    // {NotifyDeoptimized} builtin pops it off the topmost frame (possibly
    // after materialization).
    if (goto_catch_handler) {
      // If we are lazy deopting to a catch handler, we set the accumulator to
      // the exception (which lives in the result register).
      intptr_t accumulator_value =
          input_->GetRegister(kInterpreterAccumulatorRegister.code());
      frame_writer.PushRawObject(reinterpret_cast<Object*>(accumulator_value),
                                 "accumulator\n");
      ++value_iterator;  // Skip the accumulator.
    } else {
      frame_writer.PushTranslatedValue(value_iterator++, "accumulator");
    }
  } else {
    // For non-topmost frames, skip the accumulator translation. For those
    // frames, the return value from the callee will become the accumulator.
    ++value_iterator;
  }
  // All translated values must have been consumed and the frame must be
  // exactly full.
  CHECK_EQ(translated_frame->end(), value_iterator);
  CHECK_EQ(0u, frame_writer.top_offset());

  // Compute this frame's PC and state. The PC will be a special builtin that
  // continues the bytecode dispatch. Note that non-topmost and lazy-style
  // bailout handlers also advance the bytecode offset before dispatch, hence
  // simulating what normal handlers do upon completion of the operation.
  Builtins* builtins = isolate_->builtins();
  Code* dispatch_builtin =
      (!is_topmost || (deopt_kind_ == DeoptimizeKind::kLazy)) &&
              !goto_catch_handler
          ? builtins->builtin(Builtins::kInterpreterEnterBytecodeAdvance)
          : builtins->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
  output_frame->SetPc(
      static_cast<intptr_t>(dispatch_builtin->InstructionStart()));

  // Update constant pool.
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        static_cast<intptr_t>(dispatch_builtin->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
    if (is_topmost) {
      Register constant_pool_reg =
          InterpretedFrame::constant_pool_pointer_register();
      output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
    }
  }

  // Clear the context register. The context might be a de-materialized object
  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
  // safety we use Smi(0) instead of the potential {arguments_marker} here.
  if (is_topmost) {
    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
    Register context_reg = JavaScriptFrame::context_register();
    output_frame->SetRegister(context_reg.code(), context_value);
    // Set the continuation for the topmost frame.
    Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
    output_frame->SetContinuation(
        static_cast<intptr_t>(continuation->InstructionStart()));
  }
}
1060 
// Builds the output FrameDescription for an arguments adaptor frame, i.e.
// the frame inserted between caller and callee when the actual argument
// count differs from the formal parameter count.  The adaptor is never the
// topmost output frame (checked below); on reactivation execution resumes
// inside {ArgumentsAdaptorTrampoline} at its deopt PC offset.
void Deoptimizer::DoComputeArgumentsAdaptorFrame(
    TranslatedFrame* translated_frame, int frame_index) {
  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  bool is_bottommost = (0 == frame_index);

  unsigned height = translated_frame->height();
  unsigned height_in_bytes = height * kPointerSize;
  int parameter_count = height;
  if (ShouldPadArguments(parameter_count)) height_in_bytes += kPointerSize;

  // The first translated value is the function; remember it for the header.
  TranslatedFrame::iterator function_iterator = value_iterator++;
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(),
           "  translating arguments adaptor => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFixedFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, parameter_count);
  FrameWriter frame_writer(this, output_frame, trace_scope_);

  // Arguments adaptor can not be topmost.
  CHECK(frame_index < output_count_ - 1);
  CHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    top_address = caller_frame_top_ - output_frame_size;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  ReadOnlyRoots roots(isolate());
  if (ShouldPadArguments(parameter_count)) {
    frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
  }

  // Compute the incoming parameter translation.
  for (int i = 0; i < parameter_count; ++i, ++value_iterator) {
    frame_writer.PushTranslatedValue(value_iterator, "stack parameter");
  }

  DCHECK_EQ(output_frame->GetLastArgumentSlotOffset(),
            frame_writer.top_offset());

  // Read caller's PC from the previous frame.
  const intptr_t caller_pc =
      is_bottommost ? caller_pc_ : output_[frame_index - 1]->GetPc();
  frame_writer.PushCallerPc(caller_pc);

  // Read caller's FP from the previous frame, and set this frame's FP.
  const intptr_t caller_fp =
      is_bottommost ? caller_fp_ : output_[frame_index - 1]->GetFp();
  frame_writer.PushCallerFp(caller_fp);

  intptr_t fp_value = top_address + frame_writer.top_offset();
  output_frame->SetFp(fp_value);

  if (FLAG_enable_embedded_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    const intptr_t caller_cp =
        is_bottommost ? caller_constant_pool_
                      : output_[frame_index - 1]->GetConstantPool();
    frame_writer.PushCallerConstantPool(caller_cp);
  }

  // A marker value is used in place of the context.
  intptr_t marker = StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR);
  frame_writer.PushRawValue(marker, "context (adaptor sentinel)\n");

  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
  frame_writer.PushTranslatedValue(function_iterator, "function\n");

  // Number of incoming arguments: one of the {height} slots is excluded
  // (presumably the receiver — hence height - 1; verify against the caller).
  frame_writer.PushRawObject(Smi::FromInt(height - 1), "argc\n");

  // Final fixed-frame slot; filled with the hole as padding.
  frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");

  // All translated values consumed and the frame exactly full.
  CHECK_EQ(translated_frame->end(), value_iterator);
  DCHECK_EQ(0, frame_writer.top_offset());

  // Resume inside the adaptor trampoline at its recorded deopt PC offset.
  Builtins* builtins = isolate_->builtins();
  Code* adaptor_trampoline =
      builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
  intptr_t pc_value = static_cast<intptr_t>(
      adaptor_trampoline->InstructionStart() +
      isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
  output_frame->SetPc(pc_value);
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        static_cast<intptr_t>(adaptor_trampoline->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
  }
}
1162 
DoComputeConstructStubFrame(TranslatedFrame * translated_frame,int frame_index)1163 void Deoptimizer::DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
1164                                               int frame_index) {
1165   TranslatedFrame::iterator value_iterator = translated_frame->begin();
1166   bool is_topmost = (output_count_ - 1 == frame_index);
1167   // The construct frame could become topmost only if we inlined a constructor
1168   // call which does a tail call (otherwise the tail callee's frame would be
1169   // the topmost one). So it could only be the DeoptimizeKind::kLazy case.
1170   CHECK(!is_topmost || deopt_kind_ == DeoptimizeKind::kLazy);
1171 
1172   Builtins* builtins = isolate_->builtins();
1173   Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
1174   BailoutId bailout_id = translated_frame->node_id();
1175   unsigned height = translated_frame->height();
1176   unsigned height_in_bytes = height * kPointerSize;
1177 
1178   // If the construct frame appears to be topmost we should ensure that the
1179   // value of result register is preserved during continuation execution.
1180   // We do this here by "pushing" the result of the constructor function to the
1181   // top of the reconstructed stack and popping it in
1182   // {Builtins::kNotifyDeoptimized}.
1183   if (is_topmost) {
1184     height_in_bytes += kPointerSize;
1185     if (PadTopOfStackRegister()) height_in_bytes += kPointerSize;
1186   }
1187 
1188   int parameter_count = height;
1189   if (ShouldPadArguments(parameter_count)) height_in_bytes += kPointerSize;
1190 
1191   TranslatedFrame::iterator function_iterator = value_iterator++;
1192   if (trace_scope_ != nullptr) {
1193     PrintF(trace_scope_->file(),
1194            "  translating construct stub => bailout_id=%d (%s), height=%d\n",
1195            bailout_id.ToInt(),
1196            bailout_id == BailoutId::ConstructStubCreate() ? "create" : "invoke",
1197            height_in_bytes);
1198   }
1199 
1200   unsigned fixed_frame_size = ConstructFrameConstants::kFixedFrameSize;
1201   unsigned output_frame_size = height_in_bytes + fixed_frame_size;
1202 
1203   // Allocate and store the output frame description.
1204   FrameDescription* output_frame = new (output_frame_size)
1205       FrameDescription(output_frame_size, parameter_count);
1206   FrameWriter frame_writer(this, output_frame, trace_scope_);
1207 
1208   // Construct stub can not be topmost.
1209   DCHECK(frame_index > 0 && frame_index < output_count_);
1210   DCHECK_NULL(output_[frame_index]);
1211   output_[frame_index] = output_frame;
1212 
1213   // The top address of the frame is computed from the previous frame's top and
1214   // this frame's size.
1215   intptr_t top_address;
1216   top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
1217   output_frame->SetTop(top_address);
1218 
1219   ReadOnlyRoots roots(isolate());
1220   if (ShouldPadArguments(parameter_count)) {
1221     frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
1222   }
1223 
1224   // The allocated receiver of a construct stub frame is passed as the
1225   // receiver parameter through the translation. It might be encoding
1226   // a captured object, so we need save it for later.
1227   TranslatedFrame::iterator receiver_iterator = value_iterator;
1228 
1229   // Compute the incoming parameter translation.
1230   for (int i = 0; i < parameter_count; ++i, ++value_iterator) {
1231     frame_writer.PushTranslatedValue(value_iterator, "stack parameter");
1232   }
1233 
1234   DCHECK_EQ(output_frame->GetLastArgumentSlotOffset(),
1235             frame_writer.top_offset());
1236 
1237   // Read caller's PC from the previous frame.
1238   const intptr_t caller_pc = output_[frame_index - 1]->GetPc();
1239   frame_writer.PushCallerPc(caller_pc);
1240 
1241   // Read caller's FP from the previous frame, and set this frame's FP.
1242   const intptr_t caller_fp = output_[frame_index - 1]->GetFp();
1243   frame_writer.PushCallerFp(caller_fp);
1244 
1245   intptr_t fp_value = top_address + frame_writer.top_offset();
1246   output_frame->SetFp(fp_value);
1247   if (is_topmost) {
1248     Register fp_reg = JavaScriptFrame::fp_register();
1249     output_frame->SetRegister(fp_reg.code(), fp_value);
1250   }
1251 
1252   if (FLAG_enable_embedded_constant_pool) {
1253     // Read the caller's constant pool from the previous frame.
1254     const intptr_t caller_cp = output_[frame_index - 1]->GetConstantPool();
1255     frame_writer.PushCallerConstantPool(caller_cp);
1256   }
1257 
1258   // A marker value is used to mark the frame.
1259   intptr_t marker = StackFrame::TypeToMarker(StackFrame::CONSTRUCT);
1260   frame_writer.PushRawValue(marker, "context (construct stub sentinel)\n");
1261 
1262   // The context can be gotten from the previous frame.
1263   Object* context =
1264       reinterpret_cast<Object*>(output_[frame_index - 1]->GetContext());
1265   frame_writer.PushRawObject(context, "context\n");
1266 
1267   // Number of incoming arguments.
1268   frame_writer.PushRawObject(Smi::FromInt(height - 1), "argc\n");
1269 
1270   // The constructor function was mentioned explicitly in the
1271   // CONSTRUCT_STUB_FRAME.
1272   frame_writer.PushTranslatedValue(function_iterator, "constuctor function\n");
1273 
1274   // The deopt info contains the implicit receiver or the new target at the
1275   // position of the receiver. Copy it to the top of stack, with the hole value
1276   // as padding to maintain alignment.
1277 
1278   frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
1279 
1280   CHECK(bailout_id == BailoutId::ConstructStubCreate() ||
1281         bailout_id == BailoutId::ConstructStubInvoke());
1282   const char* debug_hint = bailout_id == BailoutId::ConstructStubCreate()
1283                                ? "new target\n"
1284                                : "allocated receiver\n";
1285   frame_writer.PushTranslatedValue(receiver_iterator, debug_hint);
1286 
1287   if (is_topmost) {
1288     if (PadTopOfStackRegister()) {
1289       frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
1290     }
1291     // Ensure the result is restored back when we return to the stub.
1292     Register result_reg = kReturnRegister0;
1293     intptr_t result = input_->GetRegister(result_reg.code());
1294     frame_writer.PushRawValue(result, "subcall result\n");
1295   }
1296 
1297   CHECK_EQ(translated_frame->end(), value_iterator);
1298   CHECK_EQ(0u, frame_writer.top_offset());
1299 
1300   // Compute this frame's PC.
1301   DCHECK(bailout_id.IsValidForConstructStub());
1302   Address start = construct_stub->InstructionStart();
1303   int pc_offset =
1304       bailout_id == BailoutId::ConstructStubCreate()
1305           ? isolate_->heap()->construct_stub_create_deopt_pc_offset()->value()
1306           : isolate_->heap()->construct_stub_invoke_deopt_pc_offset()->value();
1307   intptr_t pc_value = static_cast<intptr_t>(start + pc_offset);
1308   output_frame->SetPc(pc_value);
1309 
1310   // Update constant pool.
1311   if (FLAG_enable_embedded_constant_pool) {
1312     intptr_t constant_pool_value =
1313         static_cast<intptr_t>(construct_stub->constant_pool());
1314     output_frame->SetConstantPool(constant_pool_value);
1315     if (is_topmost) {
1316       Register constant_pool_reg =
1317           JavaScriptFrame::constant_pool_pointer_register();
1318       output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
1319     }
1320   }
1321 
1322   // Clear the context register. The context might be a de-materialized object
1323   // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
1324   // safety we use Smi(0) instead of the potential {arguments_marker} here.
1325   if (is_topmost) {
1326     intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
1327     Register context_reg = JavaScriptFrame::context_register();
1328     output_frame->SetRegister(context_reg.code(), context_value);
1329   }
1330 
1331   // Set the continuation for the topmost frame.
1332   if (is_topmost) {
1333     Builtins* builtins = isolate_->builtins();
1334     DCHECK_EQ(DeoptimizeKind::kLazy, deopt_kind_);
1335     Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
1336     output_frame->SetContinuation(
1337         static_cast<intptr_t>(continuation->InstructionStart()));
1338   }
1339 }
1340 
BuiltinContinuationModeIsJavaScript(BuiltinContinuationMode mode)1341 bool Deoptimizer::BuiltinContinuationModeIsJavaScript(
1342     BuiltinContinuationMode mode) {
1343   switch (mode) {
1344     case BuiltinContinuationMode::STUB:
1345       return false;
1346     case BuiltinContinuationMode::JAVASCRIPT:
1347     case BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH:
1348     case BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION:
1349       return true;
1350   }
1351   UNREACHABLE();
1352 }
1353 
BuiltinContinuationModeIsWithCatch(BuiltinContinuationMode mode)1354 bool Deoptimizer::BuiltinContinuationModeIsWithCatch(
1355     BuiltinContinuationMode mode) {
1356   switch (mode) {
1357     case BuiltinContinuationMode::STUB:
1358     case BuiltinContinuationMode::JAVASCRIPT:
1359       return false;
1360     case BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH:
1361     case BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION:
1362       return true;
1363   }
1364   UNREACHABLE();
1365 }
1366 
BuiltinContinuationModeToFrameType(BuiltinContinuationMode mode)1367 StackFrame::Type Deoptimizer::BuiltinContinuationModeToFrameType(
1368     BuiltinContinuationMode mode) {
1369   switch (mode) {
1370     case BuiltinContinuationMode::STUB:
1371       return StackFrame::BUILTIN_CONTINUATION;
1372     case BuiltinContinuationMode::JAVASCRIPT:
1373       return StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION;
1374     case BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH:
1375       return StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH;
1376     case BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION:
1377       return StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH;
1378   }
1379   UNREACHABLE();
1380 }
1381 
TrampolineForBuiltinContinuation(BuiltinContinuationMode mode,bool must_handle_result)1382 Builtins::Name Deoptimizer::TrampolineForBuiltinContinuation(
1383     BuiltinContinuationMode mode, bool must_handle_result) {
1384   switch (mode) {
1385     case BuiltinContinuationMode::STUB:
1386       return must_handle_result ? Builtins::kContinueToCodeStubBuiltinWithResult
1387                                 : Builtins::kContinueToCodeStubBuiltin;
1388     case BuiltinContinuationMode::JAVASCRIPT:
1389     case BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH:
1390     case BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION:
1391       return must_handle_result
1392                  ? Builtins::kContinueToJavaScriptBuiltinWithResult
1393                  : Builtins::kContinueToJavaScriptBuiltin;
1394   }
1395   UNREACHABLE();
1396 }
1397 
1398 // BuiltinContinuationFrames capture the machine state that is expected as input
1399 // to a builtin, including both input register values and stack parameters. When
1400 // the frame is reactivated (i.e. the frame below it returns), a
1401 // ContinueToBuiltin stub restores the register state from the frame and tail
1402 // calls to the actual target builtin, making it appear that the stub had been
1403 // directly called by the frame above it. The input values to populate the frame
1404 // are taken from the deopt's FrameState.
1405 //
1406 // Frame translation happens in two modes, EAGER and LAZY. In EAGER mode, all of
1407 // the parameters to the Builtin are explicitly specified in the TurboFan
// FrameState node. In LAZY mode, there is always one fewer parameter specified
1409 // in the FrameState than expected by the Builtin. In that case, construction of
1410 // BuiltinContinuationFrame adds the final missing parameter during
1411 // deoptimization, and that parameter is always on the stack and contains the
1412 // value returned from the callee of the call site triggering the LAZY deopt
1413 // (e.g. rax on x64). This requires that continuation Builtins for LAZY deopts
1414 // must have at least one stack parameter.
1415 //
1416 //                TO
1417 //    |          ....           |
1418 //    +-------------------------+
1419 //    | arg padding (arch dept) |<- at most 1*kPointerSize
1420 //    +-------------------------+
1421 //    |     builtin param 0     |<- FrameState input value n becomes
1422 //    +-------------------------+
1423 //    |           ...           |
1424 //    +-------------------------+
1425 //    |     builtin param m     |<- FrameState input value n+m-1, or in
1426 //    +-----needs-alignment-----+   the LAZY case, return LAZY result value
1427 //    | ContinueToBuiltin entry |
1428 //    +-------------------------+
1429 // |  |    saved frame (FP)     |
1430 // |  +=====needs=alignment=====+<- fpreg
1431 // |  |constant pool (if ool_cp)|
1432 // v  +-------------------------+
1433 //    |BUILTIN_CONTINUATION mark|
1434 //    +-------------------------+
1435 //    |  JSFunction (or zero)   |<- only if JavaScript builtin
1436 //    +-------------------------+
1437 //    |  frame height above FP  |
1438 //    +-------------------------+
1439 //    |         context         |<- this non-standard context slot contains
1440 //    +-------------------------+   the context, even for non-JS builtins.
1441 //    |     builtin address     |
1442 //    +-------------------------+
1443 //    | builtin input GPR reg0  |<- populated from deopt FrameState using
1444 //    +-------------------------+   the builtin's CallInterfaceDescriptor
1445 //    |          ...            |   to map a FrameState's 0..n-1 inputs to
1446 //    +-------------------------+   the builtin's n input register params.
1447 //    | builtin input GPR regn  |
1448 //    +-------------------------+
1449 //    | reg padding (arch dept) |
1450 //    +-----needs--alignment----+
1451 //    | res padding (arch dept) |<- only if {is_topmost}; result is pop'd by
1452 //    +-------------------------+<- kNotifyDeopt ASM stub and moved to acc
1453 //    |      result  value      |<- reg, as ContinueToBuiltin stub expects.
1454 //    +-----needs-alignment-----+<- spreg
1455 //
void Deoptimizer::DoComputeBuiltinContinuation(
    TranslatedFrame* translated_frame, int frame_index,
    BuiltinContinuationMode mode) {
  TranslatedFrame::iterator value_iterator = translated_frame->begin();

  // The output frame must have room for all of the parameters that need to be
  // passed to the builtin continuation.
  const int height_in_words = translated_frame->height();

  BailoutId bailout_id = translated_frame->node_id();
  Builtins::Name builtin_name = Builtins::GetBuiltinFromBailoutId(bailout_id);
  CHECK(!Builtins::IsLazy(builtin_name));
  Code* builtin = isolate()->builtins()->builtin(builtin_name);
  Callable continuation_callable =
      Builtins::CallableFor(isolate(), builtin_name);
  CallInterfaceDescriptor continuation_descriptor =
      continuation_callable.descriptor();

  const bool is_bottommost = (0 == frame_index);
  const bool is_topmost = (output_count_ - 1 == frame_index);
  // On a lazy deopt the callee's return value must be preserved as an extra
  // (implicit) stack parameter; see the LAZY-mode description above.
  const bool must_handle_result =
      !is_topmost || deopt_kind_ == DeoptimizeKind::kLazy;

  const RegisterConfiguration* config(RegisterConfiguration::Default());
  const int allocatable_register_count =
      config->num_allocatable_general_registers();
  const int padding_slot_count =
      BuiltinContinuationFrameConstants::PaddingSlotCount(
          allocatable_register_count);

  const int register_parameter_count =
      continuation_descriptor.GetRegisterParameterCount();
  // Make sure to account for the context by removing it from the register
  // parameter count.
  const int translated_stack_parameters =
      height_in_words - register_parameter_count - 1;
  // Extra slots: the preserved result (LAZY) and the exception placeholder
  // (WITH_CATCH).
  const int stack_param_count =
      translated_stack_parameters + (must_handle_result ? 1 : 0) +
      (BuiltinContinuationModeIsWithCatch(mode) ? 1 : 0);
  const int stack_param_pad_count =
      ShouldPadArguments(stack_param_count) ? 1 : 0;

  // If the builtins frame appears to be topmost we should ensure that the
  // value of result register is preserved during continuation execution.
  // We do this here by "pushing" the result of callback function to the
  // top of the reconstructed stack and popping it in
  // {Builtins::kNotifyDeoptimized}.
  const int push_result_count =
      is_topmost ? (PadTopOfStackRegister() ? 2 : 1) : 0;

  const unsigned output_frame_size =
      kPointerSize * (stack_param_count + stack_param_pad_count +
                      allocatable_register_count + padding_slot_count +
                      push_result_count) +
      BuiltinContinuationFrameConstants::kFixedFrameSize;

  // The portion of the frame above the frame pointer; validated against
  // {frame_writer.top_offset()} once the caller FP has been pushed.
  const unsigned output_frame_size_above_fp =
      kPointerSize * (allocatable_register_count + padding_slot_count +
                      push_result_count) +
      (BuiltinContinuationFrameConstants::kFixedFrameSize -
       BuiltinContinuationFrameConstants::kFixedFrameSizeAboveFp);

  // Validate types of parameters. They must all be tagged except for argc for
  // JS builtins.
  bool has_argc = false;
  for (int i = 0; i < register_parameter_count; ++i) {
    MachineType type = continuation_descriptor.GetParameterType(i);
    int code = continuation_descriptor.GetRegisterParameter(i).code();
    // Only tagged and int32 arguments are supported, and int32 only for the
    // arguments count on JavaScript builtins.
    if (type == MachineType::Int32()) {
      CHECK_EQ(code, kJavaScriptCallArgCountRegister.code());
      has_argc = true;
    } else {
      // Any other argument must be a tagged value.
      CHECK(IsAnyTagged(type.representation()));
    }
  }
  // JavaScript builtins always carry an argument count; stub builtins never do.
  CHECK_EQ(BuiltinContinuationModeIsJavaScript(mode), has_argc);

  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(),
           "  translating BuiltinContinuation to %s,"
           " register param count %d,"
           " stack param count %d\n",
           Builtins::name(builtin_name), register_parameter_count,
           stack_param_count);
  }

  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, stack_param_count);
  output_[frame_index] = output_frame;
  FrameWriter frame_writer(this, output_frame, trace_scope_);

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    top_address = caller_frame_top_ - output_frame_size;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Get the possible JSFunction for the case that this is a
  // JavaScriptBuiltinContinuationFrame, which needs the JSFunction pointer
  // like a normal JavaScriptFrame.
  const intptr_t maybe_function =
      reinterpret_cast<intptr_t>(value_iterator->GetRawValue());
  ++value_iterator;

  ReadOnlyRoots roots(isolate());
  if (ShouldPadArguments(stack_param_count)) {
    frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
  }

  for (int i = 0; i < translated_stack_parameters; ++i, ++value_iterator) {
    frame_writer.PushTranslatedValue(value_iterator, "stack parameter");
  }

  // Push the mode-specific extra stack slot (see the frame layout above).
  switch (mode) {
    case BuiltinContinuationMode::STUB:
      break;
    case BuiltinContinuationMode::JAVASCRIPT:
      break;
    case BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH: {
      frame_writer.PushRawObject(roots.the_hole_value(),
                                 "placeholder for exception on lazy deopt\n");
    } break;
    case BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION: {
      intptr_t accumulator_value =
          input_->GetRegister(kInterpreterAccumulatorRegister.code());
      frame_writer.PushRawObject(reinterpret_cast<Object*>(accumulator_value),
                                 "exception (from accumulator)\n");
    } break;
  }

  if (must_handle_result) {
    frame_writer.PushRawObject(roots.the_hole_value(),
                               "placeholder for return result on lazy deopt\n");
  }

  DCHECK_EQ(output_frame->GetLastArgumentSlotOffset(),
            frame_writer.top_offset());

  // Map each FrameState input to the register parameter it populates; every
  // register defaults to the current iterator position so unassigned
  // registers still have a valid (if arbitrary) translated value.
  std::vector<TranslatedFrame::iterator> register_values;
  int total_registers = config->num_general_registers();
  register_values.resize(total_registers, {value_iterator});

  for (int i = 0; i < register_parameter_count; ++i, ++value_iterator) {
    int code = continuation_descriptor.GetRegisterParameter(i).code();
    register_values[code] = value_iterator;
  }

  // The context register is always implicit in the CallInterfaceDescriptor but
  // its register must be explicitly set when continuing to the builtin. Make
  // sure that it's harvested from the translation and copied into the register
  // set (it was automatically added at the end of the FrameState by the
  // instruction selector).
  Object* context = value_iterator->GetRawValue();
  const intptr_t value = reinterpret_cast<intptr_t>(context);
  TranslatedFrame::iterator context_register_value = value_iterator++;
  register_values[kContextRegister.code()] = context_register_value;
  output_frame->SetContext(value);
  output_frame->SetRegister(kContextRegister.code(), value);

  // Set caller's PC (JSFunction continuation).
  const intptr_t caller_pc =
      is_bottommost ? caller_pc_ : output_[frame_index - 1]->GetPc();
  frame_writer.PushCallerPc(caller_pc);

  // Read caller's FP from the previous frame, and set this frame's FP.
  const intptr_t caller_fp =
      is_bottommost ? caller_fp_ : output_[frame_index - 1]->GetFp();
  frame_writer.PushCallerFp(caller_fp);

  const intptr_t fp_value = top_address + frame_writer.top_offset();
  output_frame->SetFp(fp_value);

  DCHECK_EQ(output_frame_size_above_fp, frame_writer.top_offset());

  if (FLAG_enable_embedded_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    const intptr_t caller_cp =
        is_bottommost ? caller_constant_pool_
                      : output_[frame_index - 1]->GetConstantPool();
    frame_writer.PushCallerConstantPool(caller_cp);
  }

  // A marker value is used in place of the context.
  const intptr_t marker =
      StackFrame::TypeToMarker(BuiltinContinuationModeToFrameType(mode));
  frame_writer.PushRawValue(marker,
                            "context (builtin continuation sentinel)\n");

  if (BuiltinContinuationModeIsJavaScript(mode)) {
    frame_writer.PushRawValue(maybe_function, "JSFunction\n");
  } else {
    frame_writer.PushRawValue(0, "unused\n");
  }

  // The delta from the SP to the FP; used to reconstruct SP in
  // Isolate::UnwindAndFindHandler.
  frame_writer.PushRawObject(Smi::FromInt(output_frame_size_above_fp),
                             "frame height at deoptimization\n");

  // The context, even if this is a stub continuation frame. We can't use the
  // usual context slot, because we must store the frame marker there.
  frame_writer.PushTranslatedValue(context_register_value,
                                   "builtin JavaScript context\n");

  // The builtin to continue to.
  frame_writer.PushRawObject(builtin, "builtin address\n");

  // Spill every allocatable register's translated value into the frame so
  // the ContinueToBuiltin stub can restore them before the tail call.
  for (int i = 0; i < allocatable_register_count; ++i) {
    int code = config->GetAllocatableGeneralCode(i);
    ScopedVector<char> str(128);
    if (trace_scope_ != nullptr) {
      if (BuiltinContinuationModeIsJavaScript(mode) &&
          code == kJavaScriptCallArgCountRegister.code()) {
        SNPrintF(
            str,
            "tagged argument count %s (will be untagged by continuation)\n",
            config->GetGeneralRegisterName(code));
      } else {
        SNPrintF(str, "builtin register argument %s\n",
                 config->GetGeneralRegisterName(code));
      }
    }
    frame_writer.PushTranslatedValue(
        register_values[code], trace_scope_ != nullptr ? str.start() : "");
  }

  // Some architectures must pad the stack frame with extra stack slots
  // to ensure the stack frame is aligned.
  for (int i = 0; i < padding_slot_count; ++i) {
    frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
  }

  if (is_topmost) {
    if (PadTopOfStackRegister()) {
      frame_writer.PushRawObject(roots.the_hole_value(), "padding\n");
    }
    // Ensure the result is restored back when we return to the stub.

    if (must_handle_result) {
      Register result_reg = kReturnRegister0;
      frame_writer.PushRawValue(input_->GetRegister(result_reg.code()),
                                "callback result\n");
    } else {
      frame_writer.PushRawObject(roots.undefined_value(), "callback result\n");
    }
  }

  // Every translated value must have been consumed, and the writer must have
  // filled the frame exactly.
  CHECK_EQ(translated_frame->end(), value_iterator);
  CHECK_EQ(0u, frame_writer.top_offset());

  // Clear the context register. The context might be a de-materialized object
  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
  // safety we use Smi(0) instead of the potential {arguments_marker} here.
  if (is_topmost) {
    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
    Register context_reg = JavaScriptFrame::context_register();
    output_frame->SetRegister(context_reg.code(), context_value);
  }

  // Ensure the frame pointer register points to the callee's frame. The builtin
  // will build its own frame once we continue to it.
  Register fp_reg = JavaScriptFrame::fp_register();
  output_frame->SetRegister(fp_reg.code(), fp_value);

  Code* continue_to_builtin = isolate()->builtins()->builtin(
      TrampolineForBuiltinContinuation(mode, must_handle_result));
  output_frame->SetPc(
      static_cast<intptr_t>(continue_to_builtin->InstructionStart()));

  Code* continuation =
      isolate()->builtins()->builtin(Builtins::kNotifyDeoptimized);
  output_frame->SetContinuation(
      static_cast<intptr_t>(continuation->InstructionStart()));
}
1737 
MaterializeHeapObjects()1738 void Deoptimizer::MaterializeHeapObjects() {
1739   translated_state_.Prepare(static_cast<Address>(stack_fp_));
1740   if (FLAG_deopt_every_n_times > 0) {
1741     // Doing a GC here will find problems with the deoptimized frames.
1742     isolate_->heap()->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
1743                                         GarbageCollectionReason::kTesting);
1744   }
1745 
1746   for (auto& materialization : values_to_materialize_) {
1747     Handle<Object> value = materialization.value_->GetValue();
1748 
1749     if (trace_scope_ != nullptr) {
1750       PrintF("Materialization [" V8PRIxPTR_FMT "] <- " V8PRIxPTR_FMT " ;  ",
1751              static_cast<intptr_t>(materialization.output_slot_address_),
1752              reinterpret_cast<intptr_t>(*value));
1753       value->ShortPrint(trace_scope_->file());
1754       PrintF(trace_scope_->file(), "\n");
1755     }
1756 
1757     *(reinterpret_cast<intptr_t*>(materialization.output_slot_address_)) =
1758         reinterpret_cast<intptr_t>(*value);
1759   }
1760 
1761   translated_state_.VerifyMaterializedObjects();
1762 
1763   bool feedback_updated = translated_state_.DoUpdateFeedback();
1764   if (trace_scope_ != nullptr && feedback_updated) {
1765     PrintF(trace_scope_->file(), "Feedback updated");
1766     compiled_code_->PrintDeoptLocation(trace_scope_->file(),
1767                                        " from deoptimization at ", from_);
1768   }
1769 
1770   isolate_->materialized_object_store()->Remove(
1771       static_cast<Address>(stack_fp_));
1772 }
1773 
QueueValueForMaterialization(Address output_address,Object * obj,const TranslatedFrame::iterator & iterator)1774 void Deoptimizer::QueueValueForMaterialization(
1775     Address output_address, Object* obj,
1776     const TranslatedFrame::iterator& iterator) {
1777   if (obj == ReadOnlyRoots(isolate_).arguments_marker()) {
1778     values_to_materialize_.push_back({output_address, iterator});
1779   }
1780 }
1781 
ComputeInputFrameAboveFpFixedSize() const1782 unsigned Deoptimizer::ComputeInputFrameAboveFpFixedSize() const {
1783   unsigned fixed_size = CommonFrameConstants::kFixedFrameSizeAboveFp;
1784   if (!function_->IsSmi()) {
1785     fixed_size += ComputeIncomingArgumentSize(function_->shared());
1786   }
1787   return fixed_size;
1788 }
1789 
// Total size in bytes of the optimized frame we are deoptimizing from:
// fixed part above fp plus the dynamic fp-to-sp delta recorded at deopt.
unsigned Deoptimizer::ComputeInputFrameSize() const {
  // The fp-to-sp delta already takes the context, constant pool pointer and the
  // function into account so we have to avoid double counting them.
  unsigned fixed_size_above_fp = ComputeInputFrameAboveFpFixedSize();
  unsigned result = fixed_size_above_fp + fp_to_sp_delta_;
  if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
    // Cross-check the delta against the code object's declared stack slots.
    unsigned stack_slots = compiled_code_->stack_slots();
    // Outgoing arguments are assumed absent here; the original computation is
    // kept (commented out) for reference.
    unsigned outgoing_size = 0;
    //        ComputeOutgoingArgumentSize(compiled_code_, bailout_id_);
    CHECK_EQ(fixed_size_above_fp + (stack_slots * kPointerSize) -
                 CommonFrameConstants::kFixedFrameSizeAboveFp + outgoing_size,
             result);
  }
  return result;
}
1805 
1806 // static
ComputeInterpretedFixedSize(SharedFunctionInfo * shared)1807 unsigned Deoptimizer::ComputeInterpretedFixedSize(SharedFunctionInfo* shared) {
1808   // The fixed part of the frame consists of the return address, frame
1809   // pointer, function, context, bytecode offset and all the incoming arguments.
1810   return ComputeIncomingArgumentSize(shared) +
1811          InterpreterFrameConstants::kFixedFrameSize;
1812 }
1813 
1814 // static
ComputeIncomingArgumentSize(SharedFunctionInfo * shared)1815 unsigned Deoptimizer::ComputeIncomingArgumentSize(SharedFunctionInfo* shared) {
1816   int parameter_slots = shared->internal_formal_parameter_count() + 1;
1817   if (kPadArguments) parameter_slots = RoundUp(parameter_slots, 2);
1818   return parameter_slots * kPointerSize;
1819 }
1820 
// Lazily generates (once per isolate and deopt kind) the table of deopt
// entry stubs that deoptimized code jumps to. Idempotent: returns early if
// the entry code already exists.
void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                                   DeoptimizeKind kind) {
  CHECK(kind == DeoptimizeKind::kEager || kind == DeoptimizeKind::kSoft ||
        kind == DeoptimizeKind::kLazy);
  DeoptimizerData* data = isolate->deoptimizer_data();
  if (data->deopt_entry_code(kind) != nullptr) return;

  MacroAssembler masm(isolate, nullptr, 16 * KB, CodeObjectRequired::kYes);
  masm.set_emit_debug_code(false);
  GenerateDeoptimizationEntries(&masm, kMaxNumberOfEntries, kind);
  CodeDesc desc;
  masm.GetCode(isolate, &desc);
  // The generated entries must be position-independent: their addresses are
  // handed out directly (see the comment below).
  DCHECK(!RelocInfo::RequiresRelocationAfterCodegen(desc));

  // Allocate the code as immovable since the entry addresses will be used
  // directly and there is no support for relocating them.
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::STUB, Handle<Object>(), Builtins::kNoBuiltinId,
      MaybeHandle<ByteArray>(), MaybeHandle<DeoptimizationData>(), kImmovable);
  CHECK(Heap::IsImmovable(*code));

  // No other thread may have installed the code in the meantime.
  CHECK_NULL(data->deopt_entry_code(kind));
  data->set_deopt_entry_code(kind, *code);
}
1845 
EnsureCodeForMaxDeoptimizationEntries(Isolate * isolate)1846 void Deoptimizer::EnsureCodeForMaxDeoptimizationEntries(Isolate* isolate) {
1847   EnsureCodeForDeoptimizationEntry(isolate, DeoptimizeKind::kEager);
1848   EnsureCodeForDeoptimizationEntry(isolate, DeoptimizeKind::kLazy);
1849   EnsureCodeForDeoptimizationEntry(isolate, DeoptimizeKind::kSoft);
1850 }
1851 
// Constructs a frame description of {frame_size} bytes. All registers and
// frame slots are pre-filled ("zapped") with a recognizable pattern so that
// any slot the deoptimizer forgets to write is easy to spot.
FrameDescription::FrameDescription(uint32_t frame_size, int parameter_count)
    : frame_size_(frame_size),
      parameter_count_(parameter_count),
      top_(kZapUint32),
      pc_(kZapUint32),
      fp_(kZapUint32),
      context_(kZapUint32),
      constant_pool_(kZapUint32) {
  // Zap all the registers.
  for (int r = 0; r < Register::kNumRegisters; r++) {
    // TODO(jbramley): It isn't safe to use kZapUint32 here. If the register
    // isn't used before the next safepoint, the GC will try to scan it as a
    // tagged value. kZapUint32 looks like a valid tagged pointer, but it isn't.
    SetRegister(r, kZapUint32);
  }

  // Zap all the slots.
  for (unsigned o = 0; o < frame_size; o += kPointerSize) {
    SetFrameSlot(o, kZapUint32);
  }
}
1873 
Add(int32_t value)1874 void TranslationBuffer::Add(int32_t value) {
1875   // This wouldn't handle kMinInt correctly if it ever encountered it.
1876   DCHECK_NE(value, kMinInt);
1877   // Encode the sign bit in the least significant bit.
1878   bool is_negative = (value < 0);
1879   uint32_t bits = (static_cast<uint32_t>(is_negative ? -value : value) << 1) |
1880                   static_cast<uint32_t>(is_negative);
1881   // Encode the individual bytes using the least significant bit of
1882   // each byte to indicate whether or not more bytes follow.
1883   do {
1884     uint32_t next = bits >> 7;
1885     contents_.push_back(((bits << 1) & 0xFF) | (next != 0));
1886     bits = next;
1887   } while (bits != 0);
1888 }
1889 
TranslationIterator(ByteArray * buffer,int index)1890 TranslationIterator::TranslationIterator(ByteArray* buffer, int index)
1891     : buffer_(buffer), index_(index) {
1892   DCHECK(index >= 0 && index < buffer->length());
1893 }
1894 
Next()1895 int32_t TranslationIterator::Next() {
1896   // Run through the bytes until we reach one with a least significant
1897   // bit of zero (marks the end).
1898   uint32_t bits = 0;
1899   for (int i = 0; true; i += 7) {
1900     DCHECK(HasNext());
1901     uint8_t next = buffer_->get(index_++);
1902     bits |= (next >> 1) << i;
1903     if ((next & 1) == 0) break;
1904   }
1905   // The bits encode the sign in the least significant bit.
1906   bool is_negative = (bits & 1) == 1;
1907   int32_t result = bits >> 1;
1908   return is_negative ? -result : result;
1909 }
1910 
HasNext() const1911 bool TranslationIterator::HasNext() const { return index_ < buffer_->length(); }
1912 
CreateByteArray(Factory * factory)1913 Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
1914   Handle<ByteArray> result = factory->NewByteArray(CurrentIndex(), TENURED);
1915   contents_.CopyTo(result->GetDataStartAddress());
1916   return result;
1917 }
1918 
// Emits a BUILTIN_CONTINUATION_FRAME record: bailout id, literal id of the
// function, and frame height. The Add() order defines the wire format.
void Translation::BeginBuiltinContinuationFrame(BailoutId bailout_id,
                                                int literal_id,
                                                unsigned height) {
  buffer_->Add(BUILTIN_CONTINUATION_FRAME);
  buffer_->Add(bailout_id.ToInt());
  buffer_->Add(literal_id);
  buffer_->Add(height);
}
1927 
// Emits a JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME record: bailout id,
// literal id, and frame height.
void Translation::BeginJavaScriptBuiltinContinuationFrame(BailoutId bailout_id,
                                                          int literal_id,
                                                          unsigned height) {
  buffer_->Add(JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME);
  buffer_->Add(bailout_id.ToInt());
  buffer_->Add(literal_id);
  buffer_->Add(height);
}
1936 
// Emits a JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME record:
// bailout id, literal id, and frame height.
void Translation::BeginJavaScriptBuiltinContinuationWithCatchFrame(
    BailoutId bailout_id, int literal_id, unsigned height) {
  buffer_->Add(JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME);
  buffer_->Add(bailout_id.ToInt());
  buffer_->Add(literal_id);
  buffer_->Add(height);
}
1944 
// Emits a CONSTRUCT_STUB_FRAME record: bailout id, literal id, and height.
void Translation::BeginConstructStubFrame(BailoutId bailout_id, int literal_id,
                                          unsigned height) {
  buffer_->Add(CONSTRUCT_STUB_FRAME);
  buffer_->Add(bailout_id.ToInt());
  buffer_->Add(literal_id);
  buffer_->Add(height);
}
1952 
1953 
// Emits an ARGUMENTS_ADAPTOR_FRAME record: literal id and frame height.
void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
  buffer_->Add(ARGUMENTS_ADAPTOR_FRAME);
  buffer_->Add(literal_id);
  buffer_->Add(height);
}
1959 
// Emits an INTERPRETED_FRAME record: bytecode offset, literal id, and the
// frame height (number of register-file slots).
void Translation::BeginInterpretedFrame(BailoutId bytecode_offset,
                                        int literal_id, unsigned height) {
  buffer_->Add(INTERPRETED_FRAME);
  buffer_->Add(bytecode_offset.ToInt());
  buffer_->Add(literal_id);
  buffer_->Add(height);
}
1967 
// Emits an ARGUMENTS_ELEMENTS record for the given arguments-object type.
void Translation::ArgumentsElements(CreateArgumentsType type) {
  buffer_->Add(ARGUMENTS_ELEMENTS);
  buffer_->Add(static_cast<uint8_t>(type));
}
1972 
// Emits an ARGUMENTS_LENGTH record for the given arguments-object type.
void Translation::ArgumentsLength(CreateArgumentsType type) {
  buffer_->Add(ARGUMENTS_LENGTH);
  buffer_->Add(static_cast<uint8_t>(type));
}
1977 
// Emits a CAPTURED_OBJECT record; the next {length} entries describe the
// object's fields.
void Translation::BeginCapturedObject(int length) {
  buffer_->Add(CAPTURED_OBJECT);
  buffer_->Add(length);
}
1982 
1983 
// Emits a DUPLICATED_OBJECT record referring back to an earlier captured
// object by index.
void Translation::DuplicateObject(int object_index) {
  buffer_->Add(DUPLICATED_OBJECT);
  buffer_->Add(object_index);
}
1988 
1989 
// Emits a REGISTER record: the tagged value lives in {reg}.
void Translation::StoreRegister(Register reg) {
  buffer_->Add(REGISTER);
  buffer_->Add(reg.code());
}
1994 
1995 
// Emits an INT32_REGISTER record: {reg} holds an untagged int32.
void Translation::StoreInt32Register(Register reg) {
  buffer_->Add(INT32_REGISTER);
  buffer_->Add(reg.code());
}
2000 
2001 
// Emits a UINT32_REGISTER record: {reg} holds an untagged uint32.
void Translation::StoreUint32Register(Register reg) {
  buffer_->Add(UINT32_REGISTER);
  buffer_->Add(reg.code());
}
2006 
2007 
// Emits a BOOL_REGISTER record: {reg} holds a boolean (0 or 1).
void Translation::StoreBoolRegister(Register reg) {
  buffer_->Add(BOOL_REGISTER);
  buffer_->Add(reg.code());
}
2012 
// Emits a FLOAT_REGISTER record: {reg} holds a 32-bit float.
void Translation::StoreFloatRegister(FloatRegister reg) {
  buffer_->Add(FLOAT_REGISTER);
  buffer_->Add(reg.code());
}
2017 
// Emits a DOUBLE_REGISTER record: {reg} holds a 64-bit double.
void Translation::StoreDoubleRegister(DoubleRegister reg) {
  buffer_->Add(DOUBLE_REGISTER);
  buffer_->Add(reg.code());
}
2022 
2023 
// Emits a STACK_SLOT record: the tagged value lives at stack slot {index}.
void Translation::StoreStackSlot(int index) {
  buffer_->Add(STACK_SLOT);
  buffer_->Add(index);
}
2028 
2029 
// Emits an INT32_STACK_SLOT record: slot {index} holds an untagged int32.
void Translation::StoreInt32StackSlot(int index) {
  buffer_->Add(INT32_STACK_SLOT);
  buffer_->Add(index);
}
2034 
2035 
// Emits a UINT32_STACK_SLOT record: slot {index} holds an untagged uint32.
void Translation::StoreUint32StackSlot(int index) {
  buffer_->Add(UINT32_STACK_SLOT);
  buffer_->Add(index);
}
2040 
2041 
// Emits a BOOL_STACK_SLOT record: slot {index} holds a boolean (0 or 1).
void Translation::StoreBoolStackSlot(int index) {
  buffer_->Add(BOOL_STACK_SLOT);
  buffer_->Add(index);
}
2046 
// Emits a FLOAT_STACK_SLOT record: slot {index} holds a 32-bit float.
void Translation::StoreFloatStackSlot(int index) {
  buffer_->Add(FLOAT_STACK_SLOT);
  buffer_->Add(index);
}
2051 
// Emits a DOUBLE_STACK_SLOT record: slot {index} holds a 64-bit double.
void Translation::StoreDoubleStackSlot(int index) {
  buffer_->Add(DOUBLE_STACK_SLOT);
  buffer_->Add(index);
}
2056 
2057 
// Emits a LITERAL record: the value is the literal with id {literal_id}.
void Translation::StoreLiteral(int literal_id) {
  buffer_->Add(LITERAL);
  buffer_->Add(literal_id);
}
2062 
// Emits an UPDATE_FEEDBACK record: feedback vector (as a literal id) and
// the slot within it to update on deopt.
void Translation::AddUpdateFeedback(int vector_literal, int slot) {
  buffer_->Add(UPDATE_FEEDBACK);
  buffer_->Add(vector_literal);
  buffer_->Add(slot);
}
2068 
// Records the JSFunction of the current frame as a stack slot, computed as
// the slot distance between the caller-PC and function offsets of a
// standard frame.
void Translation::StoreJSFrameFunction() {
  StoreStackSlot((StandardFrameConstants::kCallerPCOffset -
                  StandardFrameConstants::kFunctionOffset) /
                 kPointerSize);
}
2074 
// Returns how many operands follow {opcode} in the translation byte stream;
// must stay in sync with the emitters above.
int Translation::NumberOfOperandsFor(Opcode opcode) {
  switch (opcode) {
    case DUPLICATED_OBJECT:
    case ARGUMENTS_ELEMENTS:
    case ARGUMENTS_LENGTH:
    case CAPTURED_OBJECT:
    case REGISTER:
    case INT32_REGISTER:
    case UINT32_REGISTER:
    case BOOL_REGISTER:
    case FLOAT_REGISTER:
    case DOUBLE_REGISTER:
    case STACK_SLOT:
    case INT32_STACK_SLOT:
    case UINT32_STACK_SLOT:
    case BOOL_STACK_SLOT:
    case FLOAT_STACK_SLOT:
    case DOUBLE_STACK_SLOT:
    case LITERAL:
      return 1;
    case ARGUMENTS_ADAPTOR_FRAME:
    case UPDATE_FEEDBACK:
      return 2;
    case BEGIN:
    case INTERPRETED_FRAME:
    case CONSTRUCT_STUB_FRAME:
    case BUILTIN_CONTINUATION_FRAME:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME:
      return 3;
  }
  // The switch above is exhaustive; this is only reached on memory
  // corruption or an out-of-sync opcode list.
  FATAL("Unexpected translation type");
  return -1;
}
2109 
2110 
2111 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
2112 
// Returns the printable name of a translation opcode (debug/disassembly
// builds only); generated from TRANSLATION_OPCODE_LIST so it cannot drift.
const char* Translation::StringFor(Opcode opcode) {
#define TRANSLATION_OPCODE_CASE(item)   case item: return #item;
  switch (opcode) {
    TRANSLATION_OPCODE_LIST(TRANSLATION_OPCODE_CASE)
  }
#undef TRANSLATION_OPCODE_CASE
  UNREACHABLE();
}
2121 
2122 #endif
2123 
2124 
Get(Address fp)2125 Handle<FixedArray> MaterializedObjectStore::Get(Address fp) {
2126   int index = StackIdToIndex(fp);
2127   if (index == -1) {
2128     return Handle<FixedArray>::null();
2129   }
2130   Handle<FixedArray> array = GetStackEntries();
2131   CHECK_GT(array->length(), index);
2132   return Handle<FixedArray>::cast(Handle<Object>(array->get(index), isolate()));
2133 }
2134 
2135 
Set(Address fp,Handle<FixedArray> materialized_objects)2136 void MaterializedObjectStore::Set(Address fp,
2137                                   Handle<FixedArray> materialized_objects) {
2138   int index = StackIdToIndex(fp);
2139   if (index == -1) {
2140     index = static_cast<int>(frame_fps_.size());
2141     frame_fps_.push_back(fp);
2142   }
2143 
2144   Handle<FixedArray> array = EnsureStackEntries(index + 1);
2145   array->set(index, *materialized_objects);
2146 }
2147 
2148 
// Removes the entry for frame pointer {fp}, compacting both {frame_fps_}
// and the backing FixedArray. Returns false if {fp} was not present.
bool MaterializedObjectStore::Remove(Address fp) {
  auto it = std::find(frame_fps_.begin(), frame_fps_.end(), fp);
  if (it == frame_fps_.end()) return false;
  int index = static_cast<int>(std::distance(frame_fps_.begin(), it));

  frame_fps_.erase(it);
  FixedArray* array = isolate()->heap()->materialized_objects();

  CHECK_LT(index, array->length());
  // Shift the remaining entries left over the removed slot; {fps_size} is
  // the size after erase, so array slot {fps_size} is the now-stale tail.
  int fps_size = static_cast<int>(frame_fps_.size());
  for (int i = index; i < fps_size; i++) {
    array->set(i, array->get(i + 1));
  }
  array->set(fps_size, ReadOnlyRoots(isolate()).undefined_value());
  return true;
}
2165 
2166 
StackIdToIndex(Address fp)2167 int MaterializedObjectStore::StackIdToIndex(Address fp) {
2168   auto it = std::find(frame_fps_.begin(), frame_fps_.end(), fp);
2169   return it == frame_fps_.end()
2170              ? -1
2171              : static_cast<int>(std::distance(frame_fps_.begin(), it));
2172 }
2173 
2174 
// Returns the heap-root fixed array backing this store; its slots are
// indexed in parallel with {frame_fps_}.
Handle<FixedArray> MaterializedObjectStore::GetStackEntries() {
  return Handle<FixedArray>(isolate()->heap()->materialized_objects(),
                            isolate());
}
2179 
2180 
EnsureStackEntries(int length)2181 Handle<FixedArray> MaterializedObjectStore::EnsureStackEntries(int length) {
2182   Handle<FixedArray> array = GetStackEntries();
2183   if (array->length() >= length) {
2184     return array;
2185   }
2186 
2187   int new_length = length > 10 ? length : 10;
2188   if (new_length < 2 * array->length()) {
2189     new_length = 2 * array->length();
2190   }
2191 
2192   Handle<FixedArray> new_array =
2193       isolate()->factory()->NewFixedArray(new_length, TENURED);
2194   for (int i = 0; i < array->length(); i++) {
2195     new_array->set(i, array->get(i));
2196   }
2197   HeapObject* undefined_value = ReadOnlyRoots(isolate()).undefined_value();
2198   for (int i = array->length(); i < length; i++) {
2199     new_array->set(i, undefined_value);
2200   }
2201   isolate()->heap()->SetRootMaterializedObjects(*new_array);
2202   return new_array;
2203 }
2204 
2205 namespace {
2206 
GetValueForDebugger(TranslatedFrame::iterator it,Isolate * isolate)2207 Handle<Object> GetValueForDebugger(TranslatedFrame::iterator it,
2208                                    Isolate* isolate) {
2209   if (it->GetRawValue() == ReadOnlyRoots(isolate).arguments_marker()) {
2210     if (!it->IsMaterializableByDebugger()) {
2211       return isolate->factory()->optimized_out();
2212     }
2213   }
2214   return it->GetValue();
2215 }
2216 
2217 }  // namespace
2218 
// Collects function, parameters, context and expression-stack values for a
// single translated (interpreted) frame so the debugger can inspect it.
// Values walk off {frame_it} in frame layout order; unmaterializable ones
// are substituted with the "optimized out" sentinel (GetValueForDebugger).
DeoptimizedFrameInfo::DeoptimizedFrameInfo(TranslatedState* state,
                                           TranslatedState::iterator frame_it,
                                           Isolate* isolate) {
  int parameter_count =
      frame_it->shared_info()->internal_formal_parameter_count();
  TranslatedFrame::iterator stack_it = frame_it->begin();

  // Get the function. Note that this might materialize the function.
  // In case the debugger mutates this value, we should deoptimize
  // the function and remember the value in the materialized value store.
  function_ = Handle<JSFunction>::cast(stack_it->GetValue());
  stack_it++;  // Skip the function.
  stack_it++;  // Skip the receiver.

  // Only interpreted frames are supported here.
  DCHECK_EQ(TranslatedFrame::kInterpretedFunction, frame_it->kind());
  source_position_ = Deoptimizer::ComputeSourcePositionFromBytecodeArray(
      *frame_it->shared_info(), frame_it->node_id());

  DCHECK_EQ(parameter_count,
            function_->shared()->internal_formal_parameter_count());

  // Copy out the formal parameters.
  parameters_.resize(static_cast<size_t>(parameter_count));
  for (int i = 0; i < parameter_count; i++) {
    Handle<Object> parameter = GetValueForDebugger(stack_it, isolate);
    SetParameter(i, parameter);
    stack_it++;
  }

  // Get the context.
  context_ = GetValueForDebugger(stack_it, isolate);
  stack_it++;

  // Get the expression stack.
  int stack_height = frame_it->height();
  if (frame_it->kind() == TranslatedFrame::kInterpretedFunction) {
    // For interpreter frames, we should not count the accumulator.
    // TODO(jarin): Clean up the indexing in translated frames.
    stack_height--;
  }
  expression_stack_.resize(static_cast<size_t>(stack_height));
  for (int i = 0; i < stack_height; i++) {
    Handle<Object> expression = GetValueForDebugger(stack_it, isolate);
    SetExpression(i, expression);
    stack_it++;
  }

  // For interpreter frame, skip the accumulator.
  if (frame_it->kind() == TranslatedFrame::kInterpretedFunction) {
    stack_it++;
  }
  // All values of the frame must have been consumed at this point.
  CHECK(stack_it == frame_it->end());
}
2271 
2272 
// Recovers the deopt reason, deopt id and source position for the deopt
// point at {pc} by scanning {code}'s relocation info and keeping the most
// recent DEOPT_* entries that precede {pc}.
Deoptimizer::DeoptInfo Deoptimizer::GetDeoptInfo(Code* code, Address pc) {
  CHECK(code->InstructionStart() <= pc && pc <= code->InstructionEnd());
  SourcePosition last_position = SourcePosition::Unknown();
  DeoptimizeReason last_reason = DeoptimizeReason::kUnknown;
  int last_deopt_id = kNoDeoptimizationId;
  int mask = RelocInfo::ModeMask(RelocInfo::DEOPT_REASON) |
             RelocInfo::ModeMask(RelocInfo::DEOPT_ID) |
             RelocInfo::ModeMask(RelocInfo::DEOPT_SCRIPT_OFFSET) |
             RelocInfo::ModeMask(RelocInfo::DEOPT_INLINING_ID);
  for (RelocIterator it(code, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    // Only entries strictly before {pc} describe this deopt point.
    if (info->pc() >= pc) break;
    if (info->rmode() == RelocInfo::DEOPT_SCRIPT_OFFSET) {
      int script_offset = static_cast<int>(info->data());
      // A DEOPT_SCRIPT_OFFSET entry is immediately followed by its
      // DEOPT_INLINING_ID partner; consume both to form one position.
      it.next();
      DCHECK(it.rinfo()->rmode() == RelocInfo::DEOPT_INLINING_ID);
      int inlining_id = static_cast<int>(it.rinfo()->data());
      last_position = SourcePosition(script_offset, inlining_id);
    } else if (info->rmode() == RelocInfo::DEOPT_ID) {
      last_deopt_id = static_cast<int>(info->data());
    } else if (info->rmode() == RelocInfo::DEOPT_REASON) {
      last_reason = static_cast<DeoptimizeReason>(info->data());
    }
  }
  return DeoptInfo(last_position, last_reason, last_deopt_id);
}
2299 
2300 
2301 // static
ComputeSourcePositionFromBytecodeArray(SharedFunctionInfo * shared,BailoutId node_id)2302 int Deoptimizer::ComputeSourcePositionFromBytecodeArray(
2303     SharedFunctionInfo* shared, BailoutId node_id) {
2304   DCHECK(shared->HasBytecodeArray());
2305   return AbstractCode::cast(shared->GetBytecodeArray())
2306       ->SourcePosition(node_id.ToInt());
2307 }
2308 
2309 // static
NewDeferredObject(TranslatedState * container,int length,int object_index)2310 TranslatedValue TranslatedValue::NewDeferredObject(TranslatedState* container,
2311                                                    int length,
2312                                                    int object_index) {
2313   TranslatedValue slot(container, kCapturedObject);
2314   slot.materialization_info_ = {object_index, length};
2315   return slot;
2316 }
2317 
2318 
2319 // static
NewDuplicateObject(TranslatedState * container,int id)2320 TranslatedValue TranslatedValue::NewDuplicateObject(TranslatedState* container,
2321                                                     int id) {
2322   TranslatedValue slot(container, kDuplicatedObject);
2323   slot.materialization_info_ = {id, -1};
2324   return slot;
2325 }
2326 
2327 
2328 // static
NewFloat(TranslatedState * container,Float32 value)2329 TranslatedValue TranslatedValue::NewFloat(TranslatedState* container,
2330                                           Float32 value) {
2331   TranslatedValue slot(container, kFloat);
2332   slot.float_value_ = value;
2333   return slot;
2334 }
2335 
2336 // static
NewDouble(TranslatedState * container,Float64 value)2337 TranslatedValue TranslatedValue::NewDouble(TranslatedState* container,
2338                                            Float64 value) {
2339   TranslatedValue slot(container, kDouble);
2340   slot.double_value_ = value;
2341   return slot;
2342 }
2343 
2344 
2345 // static
NewInt32(TranslatedState * container,int32_t value)2346 TranslatedValue TranslatedValue::NewInt32(TranslatedState* container,
2347                                           int32_t value) {
2348   TranslatedValue slot(container, kInt32);
2349   slot.int32_value_ = value;
2350   return slot;
2351 }
2352 
2353 
2354 // static
NewUInt32(TranslatedState * container,uint32_t value)2355 TranslatedValue TranslatedValue::NewUInt32(TranslatedState* container,
2356                                            uint32_t value) {
2357   TranslatedValue slot(container, kUInt32);
2358   slot.uint32_value_ = value;
2359   return slot;
2360 }
2361 
2362 
2363 // static
NewBool(TranslatedState * container,uint32_t value)2364 TranslatedValue TranslatedValue::NewBool(TranslatedState* container,
2365                                          uint32_t value) {
2366   TranslatedValue slot(container, kBoolBit);
2367   slot.uint32_value_ = value;
2368   return slot;
2369 }
2370 
2371 
2372 // static
NewTagged(TranslatedState * container,Object * literal)2373 TranslatedValue TranslatedValue::NewTagged(TranslatedState* container,
2374                                            Object* literal) {
2375   TranslatedValue slot(container, kTagged);
2376   slot.raw_literal_ = literal;
2377   return slot;
2378 }
2379 
2380 
// static
// Creates the sentinel used for slots that carry no valid value.
TranslatedValue TranslatedValue::NewInvalid(TranslatedState* container) {
  return TranslatedValue(container, kInvalid);
}
2385 
2386 
// The isolate is reached through the owning TranslatedState.
Isolate* TranslatedValue::isolate() const { return container_->isolate(); }
2388 
2389 
// Raw (unhandlified) literal pointer; only valid for tagged values.
Object* TranslatedValue::raw_literal() const {
  DCHECK_EQ(kTagged, kind());
  return raw_literal_;
}
2394 
2395 
// Signed 32-bit payload; only valid for kInt32 values.
int32_t TranslatedValue::int32_value() const {
  DCHECK_EQ(kInt32, kind());
  return int32_value_;
}
2400 
2401 
// Unsigned 32-bit payload; kUInt32 and kBoolBit share this storage.
uint32_t TranslatedValue::uint32_value() const {
  DCHECK(kind() == kUInt32 || kind() == kBoolBit);
  return uint32_value_;
}
2406 
// 32-bit float payload; only valid for kFloat values.
Float32 TranslatedValue::float_value() const {
  DCHECK_EQ(kFloat, kind());
  return float_value_;
}
2411 
// 64-bit float payload; only valid for kDouble values.
Float64 TranslatedValue::double_value() const {
  DCHECK_EQ(kDouble, kind());
  return double_value_;
}
2416 
2417 
// Number of nested values a captured object consists of.
int TranslatedValue::object_length() const {
  DCHECK_EQ(kind(), kCapturedObject);
  return materialization_info_.length_;
}
2422 
2423 
// Id linking captured objects with their duplicates.
int TranslatedValue::object_index() const {
  DCHECK(kind() == kCapturedObject || kind() == kDuplicatedObject);
  return materialization_info_.id_;
}
2428 
2429 
GetRawValue() const2430 Object* TranslatedValue::GetRawValue() const {
2431   // If we have a value, return it.
2432   if (materialization_state() == kFinished) {
2433     return *storage_;
2434   }
2435 
2436   // Otherwise, do a best effort to get the value without allocation.
2437   switch (kind()) {
2438     case kTagged:
2439       return raw_literal();
2440 
2441     case kInt32: {
2442       bool is_smi = Smi::IsValid(int32_value());
2443       if (is_smi) {
2444         return Smi::FromInt(int32_value());
2445       }
2446       break;
2447     }
2448 
2449     case kUInt32: {
2450       bool is_smi = (uint32_value() <= static_cast<uintptr_t>(Smi::kMaxValue));
2451       if (is_smi) {
2452         return Smi::FromInt(static_cast<int32_t>(uint32_value()));
2453       }
2454       break;
2455     }
2456 
2457     case kBoolBit: {
2458       if (uint32_value() == 0) {
2459         return ReadOnlyRoots(isolate()).false_value();
2460       } else {
2461         CHECK_EQ(1U, uint32_value());
2462         return ReadOnlyRoots(isolate()).true_value();
2463       }
2464     }
2465 
2466     default:
2467       break;
2468   }
2469 
2470   // If we could not get the value without allocation, return the arguments
2471   // marker.
2472   return ReadOnlyRoots(isolate()).arguments_marker();
2473 }
2474 
// Transitions this value to the finished state with {storage} as its
// materialized representation. May only happen once per value.
void TranslatedValue::set_initialized_storage(Handle<Object> storage) {
  DCHECK_EQ(kUninitialized, materialization_state());
  storage_ = storage;
  materialization_state_ = kFinished;
}
2480 
// Returns the value as a handle, materializing it first if necessary.
// Unlike GetRawValue, this may allocate on the heap.
Handle<Object> TranslatedValue::GetValue() {
  // If we already have a value, then get it.
  if (materialization_state() == kFinished) return storage_;

  // Otherwise we have to materialize.
  switch (kind()) {
    case TranslatedValue::kTagged:
    case TranslatedValue::kInt32:
    case TranslatedValue::kUInt32:
    case TranslatedValue::kBoolBit:
    case TranslatedValue::kFloat:
    case TranslatedValue::kDouble: {
      // Scalars are materialized in place.
      MaterializeSimple();
      return storage_;
    }

    case TranslatedValue::kCapturedObject:
    case TranslatedValue::kDuplicatedObject: {
      // We need to materialize the object (or possibly even object graphs).
      // To make the object verifier happy, we materialize in two steps.

      // 1. Allocate storage for reachable objects. This makes sure that for
      //    each object we have allocated space on heap. The space will be
      //    a byte array that will be later initialized, or a fully
      //    initialized object if it is safe to allocate one that will
      //    pass the verifier.
      container_->EnsureObjectAllocatedAt(this);

      // 2. Initialize the objects. If we have allocated only byte arrays
      //    for some objects, we now overwrite the byte arrays with the
      //    correct object fields. Note that this phase does not allocate
      //    any new objects, so it does not trigger the object verifier.
      return container_->InitializeObjectAt(this);
    }

    case TranslatedValue::kInvalid:
      FATAL("unexpected case");
      return Handle<Object>::null();
  }

  FATAL("internal error: value missing");
  return Handle<Object>::null();
}
2524 
// Materializes scalar (non-object) values. Values representable without
// allocation are stored as-is; the rest are boxed as heap numbers.
// Captured/duplicated objects must never reach this point (see GetValue).
void TranslatedValue::MaterializeSimple() {
  // If we already have materialized, return.
  if (materialization_state() == kFinished) return;

  Object* raw_value = GetRawValue();
  if (raw_value != ReadOnlyRoots(isolate()).arguments_marker()) {
    // We can get the value without allocation, just return it here.
    set_initialized_storage(Handle<Object>(raw_value, isolate()));
    return;
  }

  // Otherwise allocate a heap number for the scalar payload.
  switch (kind()) {
    case kInt32:
      set_initialized_storage(
          Handle<Object>(isolate()->factory()->NewNumber(int32_value())));
      return;

    case kUInt32:
      set_initialized_storage(
          Handle<Object>(isolate()->factory()->NewNumber(uint32_value())));
      return;

    case kFloat: {
      double scalar_value = float_value().get_scalar();
      set_initialized_storage(
          Handle<Object>(isolate()->factory()->NewNumber(scalar_value)));
      return;
    }

    case kDouble: {
      double scalar_value = double_value().get_scalar();
      set_initialized_storage(
          Handle<Object>(isolate()->factory()->NewNumber(scalar_value)));
      return;
    }

    case kCapturedObject:
    case kDuplicatedObject:
    case kInvalid:
    case kTagged:
    case kBoolBit:
      // These kinds either never produce the arguments marker from
      // GetRawValue or are handled by object materialization.
      FATAL("internal error: unexpected materialization.");
      break;
  }
}
2570 
2571 
IsMaterializedObject() const2572 bool TranslatedValue::IsMaterializedObject() const {
2573   switch (kind()) {
2574     case kCapturedObject:
2575     case kDuplicatedObject:
2576       return true;
2577     default:
2578       return false;
2579   }
2580 }
2581 
// Whether the debugger may force materialization of this value.
bool TranslatedValue::IsMaterializableByDebugger() const {
  // At the moment, we only allow materialization of doubles.
  return (kind() == kDouble);
}
2586 
GetChildrenCount() const2587 int TranslatedValue::GetChildrenCount() const {
2588   if (kind() == kCapturedObject) {
2589     return object_length();
2590   } else {
2591     return 0;
2592   }
2593 }
2594 
2595 
// Reads a 32-bit value from the stack slot at {fp}+{slot_offset}. On 64-bit
// big-endian targets the 32-bit payload sits in the upper half of the slot,
// hence the kIntSize adjustment.
uint32_t TranslatedState::GetUInt32Slot(Address fp, int slot_offset) {
  Address address = fp + slot_offset;
#if V8_TARGET_BIG_ENDIAN && V8_HOST_ARCH_64_BIT
  return Memory<uint32_t>(address + kIntSize);
#else
  return Memory<uint32_t>(address);
#endif
}
2604 
// Reads a 32-bit float from a stack slot. s390x/ppc64 bypass the big-endian
// offset adjustment done in GetUInt32Slot and read the slot start directly.
Float32 TranslatedState::GetFloatSlot(Address fp, int slot_offset) {
#if !V8_TARGET_ARCH_S390X && !V8_TARGET_ARCH_PPC64
  return Float32::FromBits(GetUInt32Slot(fp, slot_offset));
#else
  return Float32::FromBits(Memory<uint32_t>(fp + slot_offset));
#endif
}
2612 
// Reads a 64-bit float from the stack slot at {fp}+{slot_offset}.
Float64 TranslatedState::GetDoubleSlot(Address fp, int slot_offset) {
  return Float64::FromBits(Memory<uint64_t>(fp + slot_offset));
}
2616 
Handlify()2617 void TranslatedValue::Handlify() {
2618   if (kind() == kTagged) {
2619     set_initialized_storage(Handle<Object>(raw_literal(), isolate()));
2620     raw_literal_ = nullptr;
2621   }
2622 }
2623 
2624 
InterpretedFrame(BailoutId bytecode_offset,SharedFunctionInfo * shared_info,int height)2625 TranslatedFrame TranslatedFrame::InterpretedFrame(
2626     BailoutId bytecode_offset, SharedFunctionInfo* shared_info, int height) {
2627   TranslatedFrame frame(kInterpretedFunction, shared_info, height);
2628   frame.node_id_ = bytecode_offset;
2629   return frame;
2630 }
2631 
2632 
// Builds a frame descriptor for an arguments adaptor frame; adaptor frames
// carry no bailout id.
TranslatedFrame TranslatedFrame::ArgumentsAdaptorFrame(
    SharedFunctionInfo* shared_info, int height) {
  return TranslatedFrame(kArgumentsAdaptor, shared_info, height);
}
2637 
ConstructStubFrame(BailoutId bailout_id,SharedFunctionInfo * shared_info,int height)2638 TranslatedFrame TranslatedFrame::ConstructStubFrame(
2639     BailoutId bailout_id, SharedFunctionInfo* shared_info, int height) {
2640   TranslatedFrame frame(kConstructStub, shared_info, height);
2641   frame.node_id_ = bailout_id;
2642   return frame;
2643 }
2644 
BuiltinContinuationFrame(BailoutId bailout_id,SharedFunctionInfo * shared_info,int height)2645 TranslatedFrame TranslatedFrame::BuiltinContinuationFrame(
2646     BailoutId bailout_id, SharedFunctionInfo* shared_info, int height) {
2647   TranslatedFrame frame(kBuiltinContinuation, shared_info, height);
2648   frame.node_id_ = bailout_id;
2649   return frame;
2650 }
2651 
JavaScriptBuiltinContinuationFrame(BailoutId bailout_id,SharedFunctionInfo * shared_info,int height)2652 TranslatedFrame TranslatedFrame::JavaScriptBuiltinContinuationFrame(
2653     BailoutId bailout_id, SharedFunctionInfo* shared_info, int height) {
2654   TranslatedFrame frame(kJavaScriptBuiltinContinuation, shared_info, height);
2655   frame.node_id_ = bailout_id;
2656   return frame;
2657 }
2658 
JavaScriptBuiltinContinuationWithCatchFrame(BailoutId bailout_id,SharedFunctionInfo * shared_info,int height)2659 TranslatedFrame TranslatedFrame::JavaScriptBuiltinContinuationWithCatchFrame(
2660     BailoutId bailout_id, SharedFunctionInfo* shared_info, int height) {
2661   TranslatedFrame frame(kJavaScriptBuiltinContinuationWithCatch, shared_info,
2662                         height);
2663   frame.node_id_ = bailout_id;
2664   return frame;
2665 }
2666 
GetValueCount()2667 int TranslatedFrame::GetValueCount() {
2668   switch (kind()) {
2669     case kInterpretedFunction: {
2670       int parameter_count =
2671           raw_shared_info_->internal_formal_parameter_count() + 1;
2672       // + 2 for function and context.
2673       return height_ + parameter_count + 2;
2674     }
2675 
2676     case kArgumentsAdaptor:
2677     case kConstructStub:
2678     case kBuiltinContinuation:
2679     case kJavaScriptBuiltinContinuation:
2680     case kJavaScriptBuiltinContinuationWithCatch:
2681       return 1 + height_;
2682 
2683     case kInvalid:
2684       UNREACHABLE();
2685       break;
2686   }
2687   UNREACHABLE();
2688 }
2689 
2690 
Handlify()2691 void TranslatedFrame::Handlify() {
2692   if (raw_shared_info_ != nullptr) {
2693     shared_info_ = Handle<SharedFunctionInfo>(raw_shared_info_,
2694                                               raw_shared_info_->GetIsolate());
2695     raw_shared_info_ = nullptr;
2696   }
2697   for (auto& value : values_) {
2698     value.Handlify();
2699   }
2700 }
2701 
2702 
// Decodes the next frame-describing opcode from {iterator} and builds the
// corresponding (still empty) TranslatedFrame, optionally tracing to
// {trace_file}. Value-describing opcodes are handled elsewhere (see
// CreateNextTranslatedValue); encountering one here is fatal.
TranslatedFrame TranslatedState::CreateNextTranslatedFrame(
    TranslationIterator* iterator, FixedArray* literal_array, Address fp,
    FILE* trace_file) {
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());
  switch (opcode) {
    case Translation::INTERPRETED_FRAME: {
      BailoutId bytecode_offset = BailoutId(iterator->Next());
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
        PrintF(trace_file, "  reading input frame %s", name.get());
        int arg_count = shared_info->internal_formal_parameter_count() + 1;
        PrintF(trace_file,
               " => bytecode_offset=%d, args=%d, height=%d; inputs:\n",
               bytecode_offset.ToInt(), arg_count, height);
      }
      return TranslatedFrame::InterpretedFrame(bytecode_offset, shared_info,
                                               height);
    }

    case Translation::ARGUMENTS_ADAPTOR_FRAME: {
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
        PrintF(trace_file, "  reading arguments adaptor frame %s", name.get());
        PrintF(trace_file, " => height=%d; inputs:\n", height);
      }
      return TranslatedFrame::ArgumentsAdaptorFrame(shared_info, height);
    }

    case Translation::CONSTRUCT_STUB_FRAME: {
      BailoutId bailout_id = BailoutId(iterator->Next());
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
        PrintF(trace_file, "  reading construct stub frame %s", name.get());
        PrintF(trace_file, " => bailout_id=%d, height=%d; inputs:\n",
               bailout_id.ToInt(), height);
      }
      return TranslatedFrame::ConstructStubFrame(bailout_id, shared_info,
                                                 height);
    }

    case Translation::BUILTIN_CONTINUATION_FRAME: {
      BailoutId bailout_id = BailoutId(iterator->Next());
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
        PrintF(trace_file, "  reading builtin continuation frame %s",
               name.get());
        PrintF(trace_file, " => bailout_id=%d, height=%d; inputs:\n",
               bailout_id.ToInt(), height);
      }
      // Add one to the height to account for the context which was implicitly
      // added to the translation during code generation.
      int height_with_context = height + 1;
      return TranslatedFrame::BuiltinContinuationFrame(bailout_id, shared_info,
                                                       height_with_context);
    }

    case Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME: {
      BailoutId bailout_id = BailoutId(iterator->Next());
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
        PrintF(trace_file, "  reading JavaScript builtin continuation frame %s",
               name.get());
        PrintF(trace_file, " => bailout_id=%d, height=%d; inputs:\n",
               bailout_id.ToInt(), height);
      }
      // Add one to the height to account for the context which was implicitly
      // added to the translation during code generation.
      int height_with_context = height + 1;
      return TranslatedFrame::JavaScriptBuiltinContinuationFrame(
          bailout_id, shared_info, height_with_context);
    }
    case Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME: {
      BailoutId bailout_id = BailoutId(iterator->Next());
      SharedFunctionInfo* shared_info =
          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
      int height = iterator->Next();
      if (trace_file != nullptr) {
        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
        PrintF(trace_file,
               "  reading JavaScript builtin continuation frame with catch %s",
               name.get());
        PrintF(trace_file, " => bailout_id=%d, height=%d; inputs:\n",
               bailout_id.ToInt(), height);
      }
      // Add one to the height to account for the context which was implicitly
      // added to the translation during code generation.
      int height_with_context = height + 1;
      return TranslatedFrame::JavaScriptBuiltinContinuationWithCatchFrame(
          bailout_id, shared_info, height_with_context);
    }
    case Translation::UPDATE_FEEDBACK:
    case Translation::BEGIN:
    case Translation::DUPLICATED_OBJECT:
    case Translation::ARGUMENTS_ELEMENTS:
    case Translation::ARGUMENTS_LENGTH:
    case Translation::CAPTURED_OBJECT:
    case Translation::REGISTER:
    case Translation::INT32_REGISTER:
    case Translation::UINT32_REGISTER:
    case Translation::BOOL_REGISTER:
    case Translation::FLOAT_REGISTER:
    case Translation::DOUBLE_REGISTER:
    case Translation::STACK_SLOT:
    case Translation::INT32_STACK_SLOT:
    case Translation::UINT32_STACK_SLOT:
    case Translation::BOOL_STACK_SLOT:
    case Translation::FLOAT_STACK_SLOT:
    case Translation::DOUBLE_STACK_SLOT:
    case Translation::LITERAL:
      break;
  }
  FATAL("We should never get here - unexpected deopt info.");
  return TranslatedFrame::InvalidFrame();
}
2833 
2834 
2835 // static
AdvanceIterator(std::deque<TranslatedValue>::iterator * iter)2836 void TranslatedFrame::AdvanceIterator(
2837     std::deque<TranslatedValue>::iterator* iter) {
2838   int values_to_skip = 1;
2839   while (values_to_skip > 0) {
2840     // Consume the current element.
2841     values_to_skip--;
2842     // Add all the children.
2843     values_to_skip += (*iter)->GetChildrenCount();
2844 
2845     (*iter)++;
2846   }
2847 }
2848 
// Determines which frame holds the actual arguments of the current
// activation and, when {length} is non-null, how many there are. If the
// caller is an arguments adaptor frame, the arguments (and their dynamic
// count) come from there; otherwise the formal parameters of the optimized
// frame itself are used. For rest parameters the count is reduced to the
// arguments beyond the formal parameter count.
Address TranslatedState::ComputeArgumentsPosition(Address input_frame_pointer,
                                                  CreateArgumentsType type,
                                                  int* length) {
  // Read the caller's frame pointer and its frame-type marker slot.
  Address parent_frame_pointer = *reinterpret_cast<Address*>(
      input_frame_pointer + StandardFrameConstants::kCallerFPOffset);
  intptr_t parent_frame_type = Memory<intptr_t>(
      parent_frame_pointer + CommonFrameConstants::kContextOrFrameTypeOffset);

  Address arguments_frame;
  if (parent_frame_type ==
      StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)) {
    // The adaptor frame records the actual argument count as a Smi.
    if (length)
      *length = Smi::cast(*reinterpret_cast<Object**>(
                              parent_frame_pointer +
                              ArgumentsAdaptorFrameConstants::kLengthOffset))
                    ->value();
    arguments_frame = parent_frame_pointer;
  } else {
    if (length) *length = formal_parameter_count_;
    arguments_frame = input_frame_pointer;
  }

  if (type == CreateArgumentsType::kRestParameter) {
    // If the actual number of arguments is less than the number of formal
    // parameters, we have zero rest parameters.
    if (length) *length = std::max(0, *length - formal_parameter_count_);
  }

  return arguments_frame;
}
2879 
2880 // Creates translated values for an arguments backing store, or the backing
2881 // store for rest parameters depending on the given {type}. The TranslatedValue
2882 // objects for the fields are not read from the TranslationIterator, but instead
2883 // created on-the-fly based on dynamic information in the optimized frame.
void TranslatedState::CreateArgumentsElementsTranslatedValues(
    int frame_index, Address input_frame_pointer, CreateArgumentsType type,
    FILE* trace_file) {
  TranslatedFrame& frame = frames_[frame_index];

  int length;
  Address arguments_frame =
      ComputeArgumentsPosition(input_frame_pointer, type, &length);

  int object_index = static_cast<int>(object_positions_.size());
  int value_index = static_cast<int>(frame.values_.size());
  if (trace_file != nullptr) {
    PrintF(trace_file, "arguments elements object #%d (type = %d, length = %d)",
           object_index, static_cast<uint8_t>(type), length);
  }

  // Emit the backing store as a deferred FixedArray: header (map + length)
  // followed by {length} element slots.
  object_positions_.push_back({frame_index, value_index});
  frame.Add(TranslatedValue::NewDeferredObject(
      this, length + FixedArray::kHeaderSize / kPointerSize, object_index));

  ReadOnlyRoots roots(isolate_);
  frame.Add(TranslatedValue::NewTagged(this, roots.fixed_array_map()));
  frame.Add(TranslatedValue::NewInt32(this, length));

  int number_of_holes = 0;
  if (type == CreateArgumentsType::kMappedArguments) {
    // If the actual number of arguments is less than the number of formal
    // parameters, we have fewer holes to fill to not overshoot the length.
    number_of_holes = Min(formal_parameter_count_, length);
  }
  for (int i = 0; i < number_of_holes; ++i) {
    frame.Add(TranslatedValue::NewTagged(this, roots.the_hole_value()));
  }
  // Remaining elements are read off the stack, last argument first.
  for (int i = length - number_of_holes - 1; i >= 0; --i) {
    Address argument_slot = arguments_frame +
                            CommonFrameConstants::kFixedFrameSizeAboveFp +
                            i * kPointerSize;
    frame.Add(TranslatedValue::NewTagged(
        this, *reinterpret_cast<Object**>(argument_slot)));
  }
}
2925 
2926 // We can't intermix stack decoding and allocations because the deoptimization
// infrastructure is not GC safe.
2928 // Thus we build a temporary structure in malloced space.
2929 // The TranslatedValue objects created correspond to the static translation
2930 // instructions from the TranslationIterator, except for
2931 // Translation::ARGUMENTS_ELEMENTS, where the number and values of the
2932 // FixedArray elements depend on dynamic information from the optimized frame.
2933 // Returns the number of expected nested translations from the
2934 // TranslationIterator.
int TranslatedState::CreateNextTranslatedValue(
    int frame_index, TranslationIterator* iterator, FixedArray* literal_array,
    Address fp, RegisterValues* registers, FILE* trace_file) {
  disasm::NameConverter converter;

  TranslatedFrame& frame = frames_[frame_index];
  int value_index = static_cast<int>(frame.values_.size());

  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());
  switch (opcode) {
    case Translation::BEGIN:
    case Translation::INTERPRETED_FRAME:
    case Translation::ARGUMENTS_ADAPTOR_FRAME:
    case Translation::CONSTRUCT_STUB_FRAME:
    case Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME:
    case Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME:
    case Translation::BUILTIN_CONTINUATION_FRAME:
    case Translation::UPDATE_FEEDBACK:
      // Peeled off before getting here.
      break;

    case Translation::DUPLICATED_OBJECT: {
      int object_id = iterator->Next();
      if (trace_file != nullptr) {
        PrintF(trace_file, "duplicated object #%d", object_id);
      }
      // Record the original object's position again so that the duplicate
      // resolves to the very same object.
      object_positions_.push_back(object_positions_[object_id]);
      TranslatedValue translated_value =
          TranslatedValue::NewDuplicateObject(this, object_id);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::ARGUMENTS_ELEMENTS: {
      // The elements are synthesized from dynamic frame information; see
      // CreateArgumentsElementsTranslatedValues.
      CreateArgumentsType arguments_type =
          static_cast<CreateArgumentsType>(iterator->Next());
      CreateArgumentsElementsTranslatedValues(frame_index, fp, arguments_type,
                                              trace_file);
      return 0;
    }

    case Translation::ARGUMENTS_LENGTH: {
      // Only the dynamically computed length is emitted; the position result
      // of ComputeArgumentsPosition is discarded here.
      CreateArgumentsType arguments_type =
          static_cast<CreateArgumentsType>(iterator->Next());
      int length;
      ComputeArgumentsPosition(fp, arguments_type, &length);
      if (trace_file != nullptr) {
        PrintF(trace_file, "arguments length field (type = %d, length = %d)",
               static_cast<uint8_t>(arguments_type), length);
      }
      frame.Add(TranslatedValue::NewInt32(this, length));
      return 0;
    }

    case Translation::CAPTURED_OBJECT: {
      int field_count = iterator->Next();
      int object_index = static_cast<int>(object_positions_.size());
      if (trace_file != nullptr) {
        PrintF(trace_file, "captured object #%d (length = %d)", object_index,
               field_count);
      }
      object_positions_.push_back({frame_index, value_index});
      TranslatedValue translated_value =
          TranslatedValue::NewDeferredObject(this, field_count, object_index);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::REGISTER: {
      int input_reg = iterator->Next();
      // {registers} is null when no register state is available (e.g. when
      // this state was built from a JavaScriptFrame); emit a placeholder.
      if (registers == nullptr) {
        TranslatedValue translated_value = TranslatedValue::NewInvalid(this);
        frame.Add(translated_value);
        return translated_value.GetChildrenCount();
      }
      intptr_t value = registers->GetRegister(input_reg);
      if (trace_file != nullptr) {
        PrintF(trace_file, V8PRIxPTR_FMT " ; %s ", value,
               converter.NameOfCPURegister(input_reg));
        reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
      }
      TranslatedValue translated_value =
          TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value));
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::INT32_REGISTER: {
      int input_reg = iterator->Next();
      if (registers == nullptr) {
        TranslatedValue translated_value = TranslatedValue::NewInvalid(this);
        frame.Add(translated_value);
        return translated_value.GetChildrenCount();
      }
      intptr_t value = registers->GetRegister(input_reg);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%" V8PRIdPTR " ; %s ", value,
               converter.NameOfCPURegister(input_reg));
      }
      TranslatedValue translated_value =
          TranslatedValue::NewInt32(this, static_cast<int32_t>(value));
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::UINT32_REGISTER: {
      int input_reg = iterator->Next();
      if (registers == nullptr) {
        TranslatedValue translated_value = TranslatedValue::NewInvalid(this);
        frame.Add(translated_value);
        return translated_value.GetChildrenCount();
      }
      intptr_t value = registers->GetRegister(input_reg);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%" V8PRIuPTR " ; %s (uint)", value,
               converter.NameOfCPURegister(input_reg));
      }
      TranslatedValue translated_value =
          TranslatedValue::NewUInt32(this, static_cast<uint32_t>(value));
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::BOOL_REGISTER: {
      int input_reg = iterator->Next();
      if (registers == nullptr) {
        TranslatedValue translated_value = TranslatedValue::NewInvalid(this);
        frame.Add(translated_value);
        return translated_value.GetChildrenCount();
      }
      intptr_t value = registers->GetRegister(input_reg);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%" V8PRIdPTR " ; %s (bool)", value,
               converter.NameOfCPURegister(input_reg));
      }
      TranslatedValue translated_value =
          TranslatedValue::NewBool(this, static_cast<uint32_t>(value));
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::FLOAT_REGISTER: {
      int input_reg = iterator->Next();
      if (registers == nullptr) {
        TranslatedValue translated_value = TranslatedValue::NewInvalid(this);
        frame.Add(translated_value);
        return translated_value.GetChildrenCount();
      }
      Float32 value = registers->GetFloatRegister(input_reg);
      if (trace_file != nullptr) {
        PrintF(
            trace_file, "%e ; %s (float)", value.get_scalar(),
            RegisterConfiguration::Default()->GetFloatRegisterName(input_reg));
      }
      TranslatedValue translated_value = TranslatedValue::NewFloat(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::DOUBLE_REGISTER: {
      int input_reg = iterator->Next();
      if (registers == nullptr) {
        TranslatedValue translated_value = TranslatedValue::NewInvalid(this);
        frame.Add(translated_value);
        return translated_value.GetChildrenCount();
      }
      Float64 value = registers->GetDoubleRegister(input_reg);
      if (trace_file != nullptr) {
        PrintF(
            trace_file, "%e ; %s (double)", value.get_scalar(),
            RegisterConfiguration::Default()->GetDoubleRegisterName(input_reg));
      }
      TranslatedValue translated_value =
          TranslatedValue::NewDouble(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::STACK_SLOT: {
      // Stack slots are always readable: the value is loaded straight from
      // the optimized frame at an fp-relative offset.
      int slot_offset =
          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
      intptr_t value = *(reinterpret_cast<intptr_t*>(fp + slot_offset));
      if (trace_file != nullptr) {
        PrintF(trace_file, V8PRIxPTR_FMT " ;  [fp %c %3d]  ", value,
               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
        reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
      }
      TranslatedValue translated_value =
          TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value));
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::INT32_STACK_SLOT: {
      int slot_offset =
          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
      uint32_t value = GetUInt32Slot(fp, slot_offset);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%d ; (int) [fp %c %3d] ",
               static_cast<int32_t>(value), slot_offset < 0 ? '-' : '+',
               std::abs(slot_offset));
      }
      TranslatedValue translated_value = TranslatedValue::NewInt32(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::UINT32_STACK_SLOT: {
      int slot_offset =
          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
      uint32_t value = GetUInt32Slot(fp, slot_offset);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%u ; (uint) [fp %c %3d] ", value,
               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
      }
      TranslatedValue translated_value =
          TranslatedValue::NewUInt32(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::BOOL_STACK_SLOT: {
      int slot_offset =
          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
      uint32_t value = GetUInt32Slot(fp, slot_offset);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%u ; (bool) [fp %c %3d] ", value,
               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
      }
      TranslatedValue translated_value = TranslatedValue::NewBool(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::FLOAT_STACK_SLOT: {
      int slot_offset =
          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
      Float32 value = GetFloatSlot(fp, slot_offset);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%e ; (float) [fp %c %3d] ", value.get_scalar(),
               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
      }
      TranslatedValue translated_value = TranslatedValue::NewFloat(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::DOUBLE_STACK_SLOT: {
      int slot_offset =
          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
      Float64 value = GetDoubleSlot(fp, slot_offset);
      if (trace_file != nullptr) {
        PrintF(trace_file, "%e ; (double) [fp %c %d] ", value.get_scalar(),
               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
      }
      TranslatedValue translated_value =
          TranslatedValue::NewDouble(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }

    case Translation::LITERAL: {
      int literal_index = iterator->Next();
      Object* value = literal_array->get(literal_index);
      if (trace_file != nullptr) {
        PrintF(trace_file, V8PRIxPTR_FMT " ; (literal %2d) ",
               reinterpret_cast<intptr_t>(value), literal_index);
        // NOTE(review): {value} is already an Object*; this reinterpret_cast
        // is redundant.
        reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
      }

      TranslatedValue translated_value =
          TranslatedValue::NewTagged(this, value);
      frame.Add(translated_value);
      return translated_value.GetChildrenCount();
    }
  }

  // Every opcode either returns above or is a frame opcode that was already
  // consumed by the caller; reaching this point means corrupt deopt data.
  FATAL("We should never get here - unexpected deopt info.");
}
3215 
// Builds a TranslatedState directly from a live optimized JavaScript frame
// using the frame's deoptimization data. In this mode there is no register
// state and no trace output (both passed as nullptr to Init).
TranslatedState::TranslatedState(const JavaScriptFrame* frame) {
  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationData* data =
      static_cast<const OptimizedFrame*>(frame)->GetDeoptimizationData(
          &deopt_index);
  DCHECK(data != nullptr && deopt_index != Safepoint::kNoDeoptimizationIndex);
  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Init(frame->isolate(), frame->fp(), &it, data->LiteralArray(),
       nullptr /* registers */, nullptr /* trace file */,
       frame->function()->shared()->internal_formal_parameter_count());
}
3228 
// Decodes one complete translation (starting at the iterator's current
// position) into {frames_}. {registers} and {trace_file} may be null — no
// register state / no tracing respectively. {input_frame_pointer} is the fp
// of the optimized frame the values are read from.
void TranslatedState::Init(Isolate* isolate, Address input_frame_pointer,
                           TranslationIterator* iterator,
                           FixedArray* literal_array, RegisterValues* registers,
                           FILE* trace_file, int formal_parameter_count) {
  DCHECK(frames_.empty());

  formal_parameter_count_ = formal_parameter_count;
  isolate_ = isolate;

  // Read out the 'header' translation.
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());
  CHECK(opcode == Translation::BEGIN);

  int count = iterator->Next();
  frames_.reserve(count);
  iterator->Next();  // Drop JS frames count.
  // At most one UPDATE_FEEDBACK entry may follow the header.
  int update_feedback_count = iterator->Next();
  CHECK_GE(update_feedback_count, 0);
  CHECK_LE(update_feedback_count, 1);

  if (update_feedback_count == 1) {
    ReadUpdateFeedback(iterator, literal_array, trace_file);
  }

  // For each captured object currently being decoded, remembers how many
  // values of the enclosing level remain once the object's children are done.
  std::stack<int> nested_counts;

  // Read the frames
  for (int frame_index = 0; frame_index < count; frame_index++) {
    // Read the frame descriptor.
    frames_.push_back(CreateNextTranslatedFrame(
        iterator, literal_array, input_frame_pointer, trace_file));
    TranslatedFrame& frame = frames_.back();

    // Read the values.
    int values_to_process = frame.GetValueCount();
    while (values_to_process > 0 || !nested_counts.empty()) {
      if (trace_file != nullptr) {
        if (nested_counts.empty()) {
          // For top level values, print the value number.
          PrintF(trace_file, "    %3i: ",
                 frame.GetValueCount() - values_to_process);
        } else {
          // Take care of indenting for nested values.
          PrintF(trace_file, "         ");
          for (size_t j = 0; j < nested_counts.size(); j++) {
            PrintF(trace_file, "  ");
          }
        }
      }

      int nested_count =
          CreateNextTranslatedValue(frame_index, iterator, literal_array,
                                    input_frame_pointer, registers, trace_file);

      if (trace_file != nullptr) {
        PrintF(trace_file, "\n");
      }

      // Update the value count and resolve the nesting.
      values_to_process--;
      if (nested_count > 0) {
        // The value just read opens a nested object: suspend the current
        // level and process the object's children first.
        nested_counts.push(values_to_process);
        values_to_process = nested_count;
      } else {
        // Pop back out of any levels that are now fully processed.
        while (values_to_process == 0 && !nested_counts.empty()) {
          values_to_process = nested_counts.top();
          nested_counts.pop();
        }
      }
    }
  }

  // The translation must be fully consumed; anything that follows starts a
  // new translation (another BEGIN).
  CHECK(!iterator->HasNext() ||
        static_cast<Translation::Opcode>(iterator->Next()) ==
            Translation::BEGIN);
}
3306 
Prepare(Address stack_frame_pointer)3307 void TranslatedState::Prepare(Address stack_frame_pointer) {
3308   for (auto& frame : frames_) frame.Handlify();
3309 
3310   if (feedback_vector_ != nullptr) {
3311     feedback_vector_handle_ =
3312         Handle<FeedbackVector>(feedback_vector_, isolate());
3313     feedback_vector_ = nullptr;
3314   }
3315   stack_frame_pointer_ = stack_frame_pointer;
3316 
3317   UpdateFromPreviouslyMaterializedObjects();
3318 }
3319 
GetValueByObjectIndex(int object_index)3320 TranslatedValue* TranslatedState::GetValueByObjectIndex(int object_index) {
3321   CHECK_LT(static_cast<size_t>(object_index), object_positions_.size());
3322   TranslatedState::ObjectPosition pos = object_positions_[object_index];
3323   return &(frames_[pos.frame_index_].values_[pos.value_index_]);
3324 }
3325 
InitializeObjectAt(TranslatedValue * slot)3326 Handle<Object> TranslatedState::InitializeObjectAt(TranslatedValue* slot) {
3327   slot = ResolveCapturedObject(slot);
3328 
3329   DisallowHeapAllocation no_allocation;
3330   if (slot->materialization_state() != TranslatedValue::kFinished) {
3331     std::stack<int> worklist;
3332     worklist.push(slot->object_index());
3333     slot->mark_finished();
3334 
3335     while (!worklist.empty()) {
3336       int index = worklist.top();
3337       worklist.pop();
3338       InitializeCapturedObjectAt(index, &worklist, no_allocation);
3339     }
3340   }
3341   return slot->GetStorage();
3342 }
3343 
// Writes the fields of the captured object with the given index into its
// already-allocated storage. Children that are themselves unfinished objects
// are pushed onto {worklist} and marked finished, so each object is processed
// exactly once. Runs under DisallowHeapAllocation — everything touched here
// must already have been allocated.
void TranslatedState::InitializeCapturedObjectAt(
    int object_index, std::stack<int>* worklist,
    const DisallowHeapAllocation& no_allocation) {
  CHECK_LT(static_cast<size_t>(object_index), object_positions_.size());
  TranslatedState::ObjectPosition pos = object_positions_[object_index];
  int value_index = pos.value_index_;

  TranslatedFrame* frame = &(frames_[pos.frame_index_]);
  TranslatedValue* slot = &(frame->values_[value_index]);
  value_index++;

  CHECK_EQ(TranslatedValue::kFinished, slot->materialization_state());
  CHECK_EQ(TranslatedValue::kCapturedObject, slot->kind());

  // Ensure all fields are initialized.
  int children_init_index = value_index;
  for (int i = 0; i < slot->GetChildrenCount(); i++) {
    // If the field is an object that has not been initialized yet, queue it
    // for initialization (and mark it as such).
    TranslatedValue* child_slot = frame->ValueAt(children_init_index);
    if (child_slot->kind() == TranslatedValue::kCapturedObject ||
        child_slot->kind() == TranslatedValue::kDuplicatedObject) {
      child_slot = ResolveCapturedObject(child_slot);
      if (child_slot->materialization_state() != TranslatedValue::kFinished) {
        DCHECK_EQ(TranslatedValue::kAllocated,
                  child_slot->materialization_state());
        worklist->push(child_slot->object_index());
        child_slot->mark_finished();
      }
    }
    SkipSlots(1, frame, &children_init_index);
  }

  // Read the map.
  // The map should never be materialized, so let us check we already have
  // an existing object here.
  CHECK_EQ(frame->values_[value_index].kind(), TranslatedValue::kTagged);
  Handle<Map> map = Handle<Map>::cast(frame->values_[value_index].GetValue());
  CHECK(map->IsMap());
  value_index++;

  // Handle the special cases.
  switch (map->instance_type()) {
    // These were fully materialized during the allocation phase (see
    // EnsureCapturedObjectAllocatedAt); nothing left to initialize.
    case MUTABLE_HEAP_NUMBER_TYPE:
    case FIXED_DOUBLE_ARRAY_TYPE:
      return;

    // All-tagged backing stores: initialize field-by-field as tagged values.
    case FIXED_ARRAY_TYPE:
    case BLOCK_CONTEXT_TYPE:
    case CATCH_CONTEXT_TYPE:
    case DEBUG_EVALUATE_CONTEXT_TYPE:
    case EVAL_CONTEXT_TYPE:
    case FUNCTION_CONTEXT_TYPE:
    case MODULE_CONTEXT_TYPE:
    case NATIVE_CONTEXT_TYPE:
    case SCRIPT_CONTEXT_TYPE:
    case WITH_CONTEXT_TYPE:
    case OBJECT_BOILERPLATE_DESCRIPTION_TYPE:
    case HASH_TABLE_TYPE:
    case ORDERED_HASH_MAP_TYPE:
    case ORDERED_HASH_SET_TYPE:
    case NAME_DICTIONARY_TYPE:
    case GLOBAL_DICTIONARY_TYPE:
    case NUMBER_DICTIONARY_TYPE:
    case SIMPLE_NUMBER_DICTIONARY_TYPE:
    case STRING_TABLE_TYPE:
    case PROPERTY_ARRAY_TYPE:
    case SCRIPT_CONTEXT_TABLE_TYPE:
      InitializeObjectWithTaggedFieldsAt(frame, &value_index, slot, map,
                                         no_allocation);
      break;

    default:
      // Everything else must be a JSObject.
      CHECK(map->IsJSObjectMap());
      InitializeJSObjectAt(frame, &value_index, slot, map, no_allocation);
      break;
  }
  // All children must have been consumed by the initializer above.
  CHECK_EQ(value_index, children_init_index);
}
3423 
EnsureObjectAllocatedAt(TranslatedValue * slot)3424 void TranslatedState::EnsureObjectAllocatedAt(TranslatedValue* slot) {
3425   slot = ResolveCapturedObject(slot);
3426 
3427   if (slot->materialization_state() == TranslatedValue::kUninitialized) {
3428     std::stack<int> worklist;
3429     worklist.push(slot->object_index());
3430     slot->mark_allocated();
3431 
3432     while (!worklist.empty()) {
3433       int index = worklist.top();
3434       worklist.pop();
3435       EnsureCapturedObjectAllocatedAt(index, &worklist);
3436     }
3437   }
3438 }
3439 
// Allocates and fully fills a FixedDoubleArray from the translated fields
// starting at *value_index (length first, then the elements), installing it
// as {slot}'s storage and advancing *value_index past the consumed fields.
void TranslatedState::MaterializeFixedDoubleArray(TranslatedFrame* frame,
                                                  int* value_index,
                                                  TranslatedValue* slot,
                                                  Handle<Map> map) {
  // The first field after the map is the array length (a Smi).
  int length = Smi::cast(frame->values_[*value_index].GetRawValue())->value();
  (*value_index)++;
  Handle<FixedDoubleArray> array = Handle<FixedDoubleArray>::cast(
      isolate()->factory()->NewFixedDoubleArray(length));
  // NOTE(review): length == 0 fails this check — presumably empty arrays
  // never reach this path; confirm against the callers.
  CHECK_GT(length, 0);
  for (int i = 0; i < length; i++) {
    // Elements are plain numbers or the hole, never nested captured objects.
    CHECK_NE(TranslatedValue::kCapturedObject,
             frame->values_[*value_index].kind());
    Handle<Object> value = frame->values_[*value_index].GetValue();
    if (value->IsNumber()) {
      array->set(i, value->Number());
    } else {
      // Any non-number element must be the hole.
      CHECK(value.is_identical_to(isolate()->factory()->the_hole_value()));
      array->set_the_hole(isolate(), i);
    }
    (*value_index)++;
  }
  slot->set_storage(array);
}
3463 
MaterializeMutableHeapNumber(TranslatedFrame * frame,int * value_index,TranslatedValue * slot)3464 void TranslatedState::MaterializeMutableHeapNumber(TranslatedFrame* frame,
3465                                                    int* value_index,
3466                                                    TranslatedValue* slot) {
3467   CHECK_NE(TranslatedValue::kCapturedObject,
3468            frame->values_[*value_index].kind());
3469   Handle<Object> value = frame->values_[*value_index].GetValue();
3470   CHECK(value->IsNumber());
3471   Handle<MutableHeapNumber> box =
3472       isolate()->factory()->NewMutableHeapNumber(value->Number());
3473   (*value_index)++;
3474   slot->set_storage(box);
3475 }
3476 
namespace {

// Markers stored per pointer-sized field slot in the ByteArray storage that
// AllocateStorageFor hands out; they record how the field must be written
// when the object is eventually initialized.
enum DoubleStorageKind : uint8_t {
  kStoreTagged,             // Default: field holds a tagged value.
  kStoreUnboxedDouble,      // Raw double field — not set in this excerpt.
  kStoreMutableHeapNumber,  // Field must be boxed as a MutableHeapNumber.
};

}  // namespace
3486 
SkipSlots(int slots_to_skip,TranslatedFrame * frame,int * value_index)3487 void TranslatedState::SkipSlots(int slots_to_skip, TranslatedFrame* frame,
3488                                 int* value_index) {
3489   while (slots_to_skip > 0) {
3490     TranslatedValue* slot = &(frame->values_[*value_index]);
3491     (*value_index)++;
3492     slots_to_skip--;
3493 
3494     if (slot->kind() == TranslatedValue::kCapturedObject) {
3495       slots_to_skip += slot->GetChildrenCount();
3496     }
3497   }
3498 }
3499 
// Allocates backing storage for the captured object with the given index and
// queues its not-yet-allocated children on {worklist}. Double arrays and
// mutable heap numbers are fully materialized here (allocation + fields),
// because their contents cannot be written in the later, allocation-free
// initialization phase.
void TranslatedState::EnsureCapturedObjectAllocatedAt(
    int object_index, std::stack<int>* worklist) {
  CHECK_LT(static_cast<size_t>(object_index), object_positions_.size());
  TranslatedState::ObjectPosition pos = object_positions_[object_index];
  int value_index = pos.value_index_;

  TranslatedFrame* frame = &(frames_[pos.frame_index_]);
  TranslatedValue* slot = &(frame->values_[value_index]);
  value_index++;

  CHECK_EQ(TranslatedValue::kAllocated, slot->materialization_state());
  CHECK_EQ(TranslatedValue::kCapturedObject, slot->kind());

  // Read the map.
  // The map should never be materialized, so let us check we already have
  // an existing object here.
  CHECK_EQ(frame->values_[value_index].kind(), TranslatedValue::kTagged);
  Handle<Map> map = Handle<Map>::cast(frame->values_[value_index].GetValue());
  CHECK(map->IsMap());
  value_index++;

  // Handle the special cases.
  switch (map->instance_type()) {
    case FIXED_DOUBLE_ARRAY_TYPE:
      // Materialize (i.e. allocate&initialize) the array and return since
      // there is no need to process the children.
      return MaterializeFixedDoubleArray(frame, &value_index, slot, map);

    case MUTABLE_HEAP_NUMBER_TYPE:
      // Materialize (i.e. allocate&initialize) the heap number and return.
      // There is no need to process the children.
      return MaterializeMutableHeapNumber(frame, &value_index, slot);

    // Array-like, all-tagged backing stores.
    case FIXED_ARRAY_TYPE:
    case SCRIPT_CONTEXT_TABLE_TYPE:
    case BLOCK_CONTEXT_TYPE:
    case CATCH_CONTEXT_TYPE:
    case DEBUG_EVALUATE_CONTEXT_TYPE:
    case EVAL_CONTEXT_TYPE:
    case FUNCTION_CONTEXT_TYPE:
    case MODULE_CONTEXT_TYPE:
    case NATIVE_CONTEXT_TYPE:
    case SCRIPT_CONTEXT_TYPE:
    case WITH_CONTEXT_TYPE:
    case HASH_TABLE_TYPE:
    case ORDERED_HASH_MAP_TYPE:
    case ORDERED_HASH_SET_TYPE:
    case NAME_DICTIONARY_TYPE:
    case GLOBAL_DICTIONARY_TYPE:
    case NUMBER_DICTIONARY_TYPE:
    case SIMPLE_NUMBER_DICTIONARY_TYPE:
    case STRING_TABLE_TYPE: {
      // Check we have the right size.
      int array_length =
          Smi::cast(frame->values_[value_index].GetRawValue())->value();

      int instance_size = FixedArray::SizeFor(array_length);
      CHECK_EQ(instance_size, slot->GetChildrenCount() * kPointerSize);

      // Canonicalize empty fixed array.
      if (*map == ReadOnlyRoots(isolate()).empty_fixed_array()->map() &&
          array_length == 0) {
        slot->set_storage(isolate()->factory()->empty_fixed_array());
      } else {
        slot->set_storage(AllocateStorageFor(slot));
      }

      // Make sure all the remaining children (after the map) are allocated.
      return EnsureChildrenAllocated(slot->GetChildrenCount() - 1, frame,
                                     &value_index, worklist);
    }

    case PROPERTY_ARRAY_TYPE: {
      // Check we have the right size. The stored Smi encodes both length and
      // hash, so the length must be decoded first.
      int length_or_hash =
          Smi::cast(frame->values_[value_index].GetRawValue())->value();
      int array_length = PropertyArray::LengthField::decode(length_or_hash);
      int instance_size = PropertyArray::SizeFor(array_length);
      CHECK_EQ(instance_size, slot->GetChildrenCount() * kPointerSize);

      slot->set_storage(AllocateStorageFor(slot));
      // Make sure all the remaining children (after the map) are allocated.
      return EnsureChildrenAllocated(slot->GetChildrenCount() - 1, frame,
                                     &value_index, worklist);
    }

    default:
      CHECK(map->IsJSObjectMap());
      EnsureJSObjectAllocated(slot, map);
      // The first child after the map is the properties backing store.
      TranslatedValue* properties_slot = &(frame->values_[value_index]);
      value_index++;
      if (properties_slot->kind() == TranslatedValue::kCapturedObject) {
        // If we are materializing the property array, make sure we put
        // the mutable heap numbers at the right places.
        EnsurePropertiesAllocatedAndMarked(properties_slot, map);
        EnsureChildrenAllocated(properties_slot->GetChildrenCount(), frame,
                                &value_index, worklist);
      }
      // Make sure all the remaining children (after the map and properties) are
      // allocated.
      return EnsureChildrenAllocated(slot->GetChildrenCount() - 2, frame,
                                     &value_index, worklist);
  }
  UNREACHABLE();
}
3605 
// Walks {count} child slots of {frame} starting at *value_index: child
// objects not yet allocated are queued on {worklist} (and marked allocated);
// simple values are materialized in place. Advances *value_index past the
// children, including any nested object fields.
void TranslatedState::EnsureChildrenAllocated(int count, TranslatedFrame* frame,
                                              int* value_index,
                                              std::stack<int>* worklist) {
  // Ensure all children are allocated.
  for (int i = 0; i < count; i++) {
    // If the field is an object that has not been allocated yet, queue it
    // for initialization (and mark it as such).
    TranslatedValue* child_slot = frame->ValueAt(*value_index);
    if (child_slot->kind() == TranslatedValue::kCapturedObject ||
        child_slot->kind() == TranslatedValue::kDuplicatedObject) {
      child_slot = ResolveCapturedObject(child_slot);
      if (child_slot->materialization_state() ==
          TranslatedValue::kUninitialized) {
        worklist->push(child_slot->object_index());
        child_slot->mark_allocated();
      }
    } else {
      // Make sure the simple values (heap numbers, etc.) are properly
      // initialized.
      child_slot->MaterializeSimple();
    }
    // Advance past this child (and, for captured objects, its children too).
    SkipSlots(1, frame, value_index);
  }
}
3630 
// Allocates backing storage for a captured out-of-object property array and
// marks the slots of double-represented properties so they later get boxed
// as MutableHeapNumbers instead of being written as tagged values.
void TranslatedState::EnsurePropertiesAllocatedAndMarked(
    TranslatedValue* properties_slot, Handle<Map> map) {
  CHECK_EQ(TranslatedValue::kUninitialized,
           properties_slot->materialization_state());

  Handle<ByteArray> object_storage = AllocateStorageFor(properties_slot);
  properties_slot->mark_allocated();
  properties_slot->set_storage(object_storage);

  // Set markers for the double properties.
  Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate());
  int field_count = map->NumberOfOwnDescriptors();
  for (int i = 0; i < field_count; i++) {
    FieldIndex index = FieldIndex::ForDescriptor(*map, i);
    if (descriptors->GetDetails(i).representation().IsDouble() &&
        !index.is_inobject()) {
      // Out-of-object doubles are never stored unboxed.
      CHECK(!map->IsUnboxedDoubleField(index));
      // Mark the first byte of the pointer-sized field slot.
      int outobject_index = index.outobject_array_index();
      int array_index = outobject_index * kPointerSize;
      object_storage->set(array_index, kStoreMutableHeapNumber);
    }
  }
}
3654 
AllocateStorageFor(TranslatedValue * slot)3655 Handle<ByteArray> TranslatedState::AllocateStorageFor(TranslatedValue* slot) {
3656   int allocate_size =
3657       ByteArray::LengthFor(slot->GetChildrenCount() * kPointerSize);
3658   // It is important to allocate all the objects tenured so that the marker
3659   // does not visit them.
3660   Handle<ByteArray> object_storage =
3661       isolate()->factory()->NewByteArray(allocate_size, TENURED);
3662   for (int i = 0; i < object_storage->length(); i++) {
3663     object_storage->set(i, kStoreTagged);
3664   }
3665   return object_storage;
3666 }
3667 
EnsureJSObjectAllocated(TranslatedValue * slot,Handle<Map> map)3668 void TranslatedState::EnsureJSObjectAllocated(TranslatedValue* slot,
3669                                               Handle<Map> map) {
3670   CHECK_EQ(map->instance_size(), slot->GetChildrenCount() * kPointerSize);
3671 
3672   Handle<ByteArray> object_storage = AllocateStorageFor(slot);
3673   // Now we handle the interesting (JSObject) case.
3674   Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate());
3675   int field_count = map->NumberOfOwnDescriptors();
3676 
3677   // Set markers for the double properties.
3678   for (int i = 0; i < field_count; i++) {
3679     FieldIndex index = FieldIndex::ForDescriptor(*map, i);
3680     if (descriptors->GetDetails(i).representation().IsDouble() &&
3681         index.is_inobject()) {
3682       CHECK_GE(index.index(), FixedArray::kHeaderSize / kPointerSize);
3683       int array_index = index.index() * kPointerSize - FixedArray::kHeaderSize;
3684       uint8_t marker = map->IsUnboxedDoubleField(index)
3685                            ? kStoreUnboxedDouble
3686                            : kStoreMutableHeapNumber;
3687       object_storage->set(array_index, marker);
3688     }
3689   }
3690   slot->set_storage(object_storage);
3691 }
3692 
GetValueAndAdvance(TranslatedFrame * frame,int * value_index)3693 Handle<Object> TranslatedState::GetValueAndAdvance(TranslatedFrame* frame,
3694                                                    int* value_index) {
3695   TranslatedValue* slot = frame->ValueAt(*value_index);
3696   SkipSlots(1, frame, value_index);
3697   if (slot->kind() == TranslatedValue::kDuplicatedObject) {
3698     slot = ResolveCapturedObject(slot);
3699   }
3700   CHECK_NE(TranslatedValue::kUninitialized, slot->materialization_state());
3701   return slot->GetStorage();
3702 }
3703 
// Fills the preallocated storage of a captured JSObject with the
// materialized field values, honoring the per-field markers that
// EnsureJSObjectAllocated wrote into the storage (tagged, mutable heap
// number, or unboxed double). The map is installed last so the object only
// becomes fully typed once all fields are written.
void TranslatedState::InitializeJSObjectAt(
    TranslatedFrame* frame, int* value_index, TranslatedValue* slot,
    Handle<Map> map, const DisallowHeapAllocation& no_allocation) {
  Handle<HeapObject> object_storage = Handle<HeapObject>::cast(slot->storage_);
  DCHECK_EQ(TranslatedValue::kCapturedObject, slot->kind());

  // The object should have at least a map and some payload.
  CHECK_GE(slot->GetChildrenCount(), 2);

  // Notify the concurrent marker about the layout change.
  isolate()->heap()->NotifyObjectLayoutChange(
      *object_storage, slot->GetChildrenCount() * kPointerSize, no_allocation);

  // Fill the property array field.
  {
    Handle<Object> properties = GetValueAndAdvance(frame, value_index);
    WRITE_FIELD(*object_storage, JSObject::kPropertiesOrHashOffset,
                *properties);
    WRITE_BARRIER(*object_storage, JSObject::kPropertiesOrHashOffset,
                  *properties);
  }

  // For all the other fields we first look at the fixed array and check the
  // marker to see if we store an unboxed double.
  DCHECK_EQ(kPointerSize, JSObject::kPropertiesOrHashOffset);
  for (int i = 2; i < slot->GetChildrenCount(); i++) {
    // Initialize and extract the value from its slot.
    Handle<Object> field_value = GetValueAndAdvance(frame, value_index);

    // Read out the marker and ensure the field is consistent with
    // what the markers in the storage say (note that all heap numbers
    // should be fully initialized by now).
    int offset = i * kPointerSize;
    uint8_t marker = READ_UINT8_FIELD(*object_storage, offset);
    if (marker == kStoreUnboxedDouble) {
      // Unboxed double fields store the raw double bits in place; accept
      // either a Smi or a HeapNumber as the materialized value.
      double double_field_value;
      if (field_value->IsSmi()) {
        double_field_value = Smi::cast(*field_value)->value();
      } else {
        CHECK(field_value->IsHeapNumber());
        double_field_value = HeapNumber::cast(*field_value)->value();
      }
      WRITE_DOUBLE_FIELD(*object_storage, offset, double_field_value);
    } else if (marker == kStoreMutableHeapNumber) {
      CHECK(field_value->IsMutableHeapNumber());
      WRITE_FIELD(*object_storage, offset, *field_value);
      WRITE_BARRIER(*object_storage, offset, *field_value);
    } else {
      CHECK_EQ(kStoreTagged, marker);
      WRITE_FIELD(*object_storage, offset, *field_value);
      WRITE_BARRIER(*object_storage, offset, *field_value);
    }
  }
  // Install the map last; presumably the synchronized store publishes the
  // fully initialized object to concurrent marking threads -- see the
  // NotifyObjectLayoutChange call above.
  object_storage->synchronized_set_map(*map);
}
3759 
// Fills the preallocated storage of a captured non-JSObject (e.g. a fixed
// array) whose fields are all tagged, checking each field against the marker
// recorded in the storage. The map is installed last so the object only
// becomes fully typed once all fields are written.
void TranslatedState::InitializeObjectWithTaggedFieldsAt(
    TranslatedFrame* frame, int* value_index, TranslatedValue* slot,
    Handle<Map> map, const DisallowHeapAllocation& no_allocation) {
  Handle<HeapObject> object_storage = Handle<HeapObject>::cast(slot->storage_);

  // Skip the writes if we already have the canonical empty fixed array.
  if (*object_storage == ReadOnlyRoots(isolate()).empty_fixed_array()) {
    // Still consume the slots: the object must consist of exactly the map
    // and a zero length.
    CHECK_EQ(2, slot->GetChildrenCount());
    Handle<Object> length_value = GetValueAndAdvance(frame, value_index);
    CHECK_EQ(*length_value, Smi::FromInt(0));
    return;
  }

  // Notify the concurrent marker about the layout change.
  isolate()->heap()->NotifyObjectLayoutChange(
      *object_storage, slot->GetChildrenCount() * kPointerSize, no_allocation);

  // Write the fields to the object.
  for (int i = 1; i < slot->GetChildrenCount(); i++) {
    Handle<Object> field_value = GetValueAndAdvance(frame, value_index);
    int offset = i * kPointerSize;
    uint8_t marker = READ_UINT8_FIELD(*object_storage, offset);
    // Field 1 is the length/first payload word; only later fields may carry
    // a mutable-heap-number marker.
    if (i > 1 && marker == kStoreMutableHeapNumber) {
      CHECK(field_value->IsMutableHeapNumber());
    } else {
      CHECK(marker == kStoreTagged || i == 1);
      CHECK(!field_value->IsMutableHeapNumber());
    }

    WRITE_FIELD(*object_storage, offset, *field_value);
    WRITE_BARRIER(*object_storage, offset, *field_value);
  }

  // Install the map last, with a synchronized store (the concurrent marker
  // was notified of the layout change above).
  object_storage->synchronized_set_map(*map);
}
3795 
ResolveCapturedObject(TranslatedValue * slot)3796 TranslatedValue* TranslatedState::ResolveCapturedObject(TranslatedValue* slot) {
3797   while (slot->kind() == TranslatedValue::kDuplicatedObject) {
3798     slot = GetValueByObjectIndex(slot->object_index());
3799   }
3800   CHECK_EQ(TranslatedValue::kCapturedObject, slot->kind());
3801   return slot;
3802 }
3803 
GetFrameFromJSFrameIndex(int jsframe_index)3804 TranslatedFrame* TranslatedState::GetFrameFromJSFrameIndex(int jsframe_index) {
3805   for (size_t i = 0; i < frames_.size(); i++) {
3806     if (frames_[i].kind() == TranslatedFrame::kInterpretedFunction ||
3807         frames_[i].kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
3808         frames_[i].kind() ==
3809             TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
3810       if (jsframe_index > 0) {
3811         jsframe_index--;
3812       } else {
3813         return &(frames_[i]);
3814       }
3815     }
3816   }
3817   return nullptr;
3818 }
3819 
GetArgumentsInfoFromJSFrameIndex(int jsframe_index,int * args_count)3820 TranslatedFrame* TranslatedState::GetArgumentsInfoFromJSFrameIndex(
3821     int jsframe_index, int* args_count) {
3822   for (size_t i = 0; i < frames_.size(); i++) {
3823     if (frames_[i].kind() == TranslatedFrame::kInterpretedFunction ||
3824         frames_[i].kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
3825         frames_[i].kind() ==
3826             TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
3827       if (jsframe_index > 0) {
3828         jsframe_index--;
3829       } else {
3830         // We have the JS function frame, now check if it has arguments
3831         // adaptor.
3832         if (i > 0 &&
3833             frames_[i - 1].kind() == TranslatedFrame::kArgumentsAdaptor) {
3834           *args_count = frames_[i - 1].height();
3835           return &(frames_[i - 1]);
3836         }
3837         *args_count =
3838             frames_[i].shared_info()->internal_formal_parameter_count() + 1;
3839         return &(frames_[i]);
3840       }
3841     }
3842   }
3843   return nullptr;
3844 }
3845 
// Records the values materialized for this frame in the per-isolate
// materialized-object store (keyed by the stack frame pointer) so later
// deopts of the same frame reuse the same object identities. If this is the
// first time a store is created for the frame and any value was actually
// recorded, the function is deoptimized again so it picks the values up.
void TranslatedState::StoreMaterializedValuesAndDeopt(JavaScriptFrame* frame) {
  MaterializedObjectStore* materialized_store =
      isolate_->materialized_object_store();
  Handle<FixedArray> previously_materialized_objects =
      materialized_store->Get(stack_frame_pointer_);

  // The arguments marker denotes "no value recorded yet" in the store.
  Handle<Object> marker = isolate_->factory()->arguments_marker();

  int length = static_cast<int>(object_positions_.size());
  bool new_store = false;
  if (previously_materialized_objects.is_null()) {
    // First store for this frame: create an array filled with markers.
    previously_materialized_objects =
        isolate_->factory()->NewFixedArray(length, TENURED);
    for (int i = 0; i < length; i++) {
      previously_materialized_objects->set(i, *marker);
    }
    new_store = true;
  }

  CHECK_EQ(length, previously_materialized_objects->length());

  bool value_changed = false;
  for (int i = 0; i < length; i++) {
    TranslatedState::ObjectPosition pos = object_positions_[i];
    TranslatedValue* value_info =
        &(frames_[pos.frame_index_].values_[pos.value_index_]);

    CHECK(value_info->IsMaterializedObject());

    // Skip duplicate objects (i.e., those that point to some
    // other object id).
    if (value_info->object_index() != i) continue;

    Handle<Object> value(value_info->GetRawValue(), isolate_);

    if (!value.is_identical_to(marker)) {
      if (previously_materialized_objects->get(i) == *marker) {
        // First materialization of this object: record it.
        previously_materialized_objects->set(i, *value);
        value_changed = true;
      } else {
        // Already recorded: the identity must be stable across deopts.
        CHECK(previously_materialized_objects->get(i) == *value);
      }
    }
  }
  if (new_store && value_changed) {
    // Publish the freshly created array (an existing array was fetched from
    // the store and mutated in place, so it needs no re-publishing) and
    // deoptimize the function so it observes the recorded values.
    materialized_store->Set(stack_frame_pointer_,
                            previously_materialized_objects);
    CHECK_EQ(frames_[0].kind(), TranslatedFrame::kInterpretedFunction);
    CHECK_EQ(frame->function(), frames_[0].front().GetRawValue());
    Deoptimizer::DeoptimizeFunction(frame->function(), frame->LookupCode());
  }
}
3898 
UpdateFromPreviouslyMaterializedObjects()3899 void TranslatedState::UpdateFromPreviouslyMaterializedObjects() {
3900   MaterializedObjectStore* materialized_store =
3901       isolate_->materialized_object_store();
3902   Handle<FixedArray> previously_materialized_objects =
3903       materialized_store->Get(stack_frame_pointer_);
3904 
3905   // If we have no previously materialized objects, there is nothing to do.
3906   if (previously_materialized_objects.is_null()) return;
3907 
3908   Handle<Object> marker = isolate_->factory()->arguments_marker();
3909 
3910   int length = static_cast<int>(object_positions_.size());
3911   CHECK_EQ(length, previously_materialized_objects->length());
3912 
3913   for (int i = 0; i < length; i++) {
3914     // For a previously materialized objects, inject their value into the
3915     // translated values.
3916     if (previously_materialized_objects->get(i) != *marker) {
3917       TranslatedState::ObjectPosition pos = object_positions_[i];
3918       TranslatedValue* value_info =
3919           &(frames_[pos.frame_index_].values_[pos.value_index_]);
3920       CHECK(value_info->IsMaterializedObject());
3921 
3922       if (value_info->kind() == TranslatedValue::kCapturedObject) {
3923         value_info->set_initialized_storage(
3924             Handle<Object>(previously_materialized_objects->get(i), isolate_));
3925       }
3926     }
3927   }
3928 }
3929 
VerifyMaterializedObjects()3930 void TranslatedState::VerifyMaterializedObjects() {
3931 #if VERIFY_HEAP
3932   int length = static_cast<int>(object_positions_.size());
3933   for (int i = 0; i < length; i++) {
3934     TranslatedValue* slot = GetValueByObjectIndex(i);
3935     if (slot->kind() == TranslatedValue::kCapturedObject) {
3936       CHECK_EQ(slot, GetValueByObjectIndex(slot->object_index()));
3937       if (slot->materialization_state() == TranslatedValue::kFinished) {
3938         slot->GetStorage()->ObjectVerify(isolate());
3939       } else {
3940         CHECK_EQ(slot->materialization_state(),
3941                  TranslatedValue::kUninitialized);
3942       }
3943     }
3944   }
3945 #endif
3946 }
3947 
DoUpdateFeedback()3948 bool TranslatedState::DoUpdateFeedback() {
3949   if (!feedback_vector_handle_.is_null()) {
3950     CHECK(!feedback_slot_.IsInvalid());
3951     isolate()->CountUsage(v8::Isolate::kDeoptimizerDisableSpeculation);
3952     FeedbackNexus nexus(feedback_vector_handle_, feedback_slot_);
3953     nexus.SetSpeculationMode(SpeculationMode::kDisallowSpeculation);
3954     return true;
3955   }
3956   return false;
3957 }
3958 
ReadUpdateFeedback(TranslationIterator * iterator,FixedArray * literal_array,FILE * trace_file)3959 void TranslatedState::ReadUpdateFeedback(TranslationIterator* iterator,
3960                                          FixedArray* literal_array,
3961                                          FILE* trace_file) {
3962   CHECK_EQ(Translation::UPDATE_FEEDBACK, iterator->Next());
3963   feedback_vector_ = FeedbackVector::cast(literal_array->get(iterator->Next()));
3964   feedback_slot_ = FeedbackSlot(iterator->Next());
3965   if (trace_file != nullptr) {
3966     PrintF(trace_file, "  reading FeedbackVector (slot %d)\n",
3967            feedback_slot_.ToInt());
3968   }
3969 }
3970 
3971 }  // namespace internal
3972 }  // namespace v8
3973 
3974 // Undefine the heap manipulation macros.
3975 #include "src/objects/object-macros-undef.h"
3976