// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/accessors.h"
#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/disasm.h"
#include "src/full-codegen.h"
#include "src/global-handles.h"
#include "src/macro-assembler.h"
#include "src/prettyprinter.h"


namespace v8 {
namespace internal {

static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
  return allocator->AllocateChunk(Deoptimizer::GetMaxDeoptTableSize(),
                                  OS::CommitPageSize(),
#if defined(__native_client__)
                                  // The Native Client port of V8 uses an
                                  // interpreter, so code pages don't need
                                  // PROT_EXEC.
                                  NOT_EXECUTABLE,
#else
                                  EXECUTABLE,
#endif
                                  NULL);
}


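// Each isolate owns one DeoptimizerData. It eagerly reserves a code chunk per
// bailout type that has a table of deoptimization entries; the entries
// themselves are generated lazily (see EnsureCodeForDeoptimizationEntry).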
DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
    : allocator_(allocator),
      deoptimized_frame_info_(NULL),
      current_(NULL) {
  for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
    deopt_entry_code_entries_[i] = -1;
    deopt_entry_code_[i] = AllocateCodeChunk(allocator);
  }
}


DeoptimizerData::~DeoptimizerData() {
  for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
    allocator_->Free(deopt_entry_code_[i]);
    deopt_entry_code_[i] = NULL;
  }
}


void DeoptimizerData::Iterate(ObjectVisitor* v) {
  if (deoptimized_frame_info_ != NULL) {
    deoptimized_frame_info_->Iterate(v);
  }
}


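// Walks the deoptimized code list of the function's native context and
// returns the optimized code object containing |addr|, or NULL if the
// address does not belong to any code on that list.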
Code* Deoptimizer::FindDeoptimizingCode(Address addr) {
  if (function_->IsHeapObject()) {
    // Search all deoptimizing code in the native context of the function.
    Context* native_context = function_->context()->native_context();
    Object* element = native_context->DeoptimizedCodeListHead();
    while (!element->IsUndefined()) {
      Code* code = Code::cast(element);
      CHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
      if (code->contains(addr)) return code;
      element = code->next_code_link();
    }
  }
  return NULL;
}


// We rely on this function not causing a GC. It is called from generated code
// without having a real stack frame in place.
Deoptimizer* Deoptimizer::New(JSFunction* function,
                              BailoutType type,
                              unsigned bailout_id,
                              Address from,
                              int fp_to_sp_delta,
                              Isolate* isolate) {
  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
                                             function,
                                             type,
                                             bailout_id,
                                             from,
                                             fp_to_sp_delta,
                                             NULL);
  CHECK(isolate->deoptimizer_data()->current_ == NULL);
  isolate->deoptimizer_data()->current_ = deoptimizer;
  return deoptimizer;
}


// No larger than 2K on all platforms
static const int kDeoptTableMaxEpilogueCodeSize = 2 * KB;


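// Upper bound on the size of a deoptimization entry table: all possible
// entries plus at most 2 KB of epilogue code, rounded up to whole commit
// pages (with one extra page of slack from the "+ 1").
// Illustrative example (the concrete constants are platform-dependent):
// assuming 4 KB commit pages, 16384 entries and a 10-byte entry size,
//   entries_size = 16384 * 10 = 163840
//   page_count   = (2048 + 163840 - 1) / 4096 + 1 = 41
//   result       = 41 * 4096 = 167936 bytes (164 KB).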
size_t Deoptimizer::GetMaxDeoptTableSize() {
  int entries_size =
      Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_;
  int commit_page_size = static_cast<int>(OS::CommitPageSize());
  int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) /
                    commit_page_size) + 1;
  return static_cast<size_t>(commit_page_size * page_count);
}


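// Retrieves the Deoptimizer instance installed by New() for this isolate,
// frees its (GC-unsafe) frame descriptions and clears the per-isolate slot
// so that a new deoptimization can be started.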
Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
  Deoptimizer* result = isolate->deoptimizer_data()->current_;
  CHECK_NE(result, NULL);
  result->DeleteFrameDescriptions();
  isolate->deoptimizer_data()->current_ = NULL;
  return result;
}


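// Maps an index into the JavaScript frames of this deoptimization to an
// index into output_[], which additionally contains non-JavaScript frames
// such as arguments adaptor and construct stub frames.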
int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) {
  if (jsframe_index == 0) return 0;

  int frame_index = 0;
  while (jsframe_index >= 0) {
    FrameDescription* frame = output_[frame_index];
    if (frame->GetFrameType() == StackFrame::JAVA_SCRIPT) {
      jsframe_index--;
    }
    frame_index++;
  }

  return frame_index - 1;
}


DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
    JavaScriptFrame* frame,
    int jsframe_index,
    Isolate* isolate) {
  CHECK(frame->is_optimized());
  CHECK(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL);

  // Get the function and code from the frame.
  JSFunction* function = frame->function();
  Code* code = frame->LookupCode();

  // Locate the deoptimization point in the code. As we are at a call the
  // return address must be at a place in the code with deoptimization support.
  SafepointEntry safepoint_entry = code->GetSafepointEntry(frame->pc());
  int deoptimization_index = safepoint_entry.deoptimization_index();
  CHECK_NE(deoptimization_index, Safepoint::kNoDeoptimizationIndex);

  // Always use the actual stack slots when calculating the fp to sp
  // delta adding two for the function and context.
  unsigned stack_slots = code->stack_slots();
  unsigned fp_to_sp_delta = (stack_slots * kPointerSize) +
      StandardFrameConstants::kFixedFrameSizeFromFp;

  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
                                             function,
                                             Deoptimizer::DEBUGGER,
                                             deoptimization_index,
                                             frame->pc(),
                                             fp_to_sp_delta,
                                             code);
  Address tos = frame->fp() - fp_to_sp_delta;
  deoptimizer->FillInputFrame(tos, frame);

  // Calculate the output frames.
  Deoptimizer::ComputeOutputFrames(deoptimizer);

  // Create the GC safe output frame information and register it for GC
  // handling.
  CHECK_LT(jsframe_index, deoptimizer->jsframe_count());

  // Convert JS frame index into frame index.
  int frame_index = deoptimizer->ConvertJSFrameIndexToFrameIndex(jsframe_index);

  bool has_arguments_adaptor =
      frame_index > 0 &&
      deoptimizer->output_[frame_index - 1]->GetFrameType() ==
          StackFrame::ARGUMENTS_ADAPTOR;

  int construct_offset = has_arguments_adaptor ? 2 : 1;
  bool has_construct_stub =
      frame_index >= construct_offset &&
      deoptimizer->output_[frame_index - construct_offset]->GetFrameType() ==
          StackFrame::CONSTRUCT;

  DeoptimizedFrameInfo* info = new DeoptimizedFrameInfo(deoptimizer,
                                                        frame_index,
                                                        has_arguments_adaptor,
                                                        has_construct_stub);
  isolate->deoptimizer_data()->deoptimized_frame_info_ = info;

  // Get the "simulated" top and size for the requested frame.
  FrameDescription* parameters_frame =
      deoptimizer->output_[
          has_arguments_adaptor ? (frame_index - 1) : frame_index];

  uint32_t parameters_size = (info->parameters_count() + 1) * kPointerSize;
  Address parameters_top = reinterpret_cast<Address>(
      parameters_frame->GetTop() + (parameters_frame->GetFrameSize() -
                                    parameters_size));

  uint32_t expressions_size = info->expression_count() * kPointerSize;
  Address expressions_top = reinterpret_cast<Address>(
      deoptimizer->output_[frame_index]->GetTop());

  // Done with the GC-unsafe frame descriptions. This re-enables allocation.
  deoptimizer->DeleteFrameDescriptions();

  // Allocate a heap number for the doubles belonging to this frame.
  deoptimizer->MaterializeHeapNumbersForDebuggerInspectableFrame(
      parameters_top, parameters_size, expressions_top, expressions_size, info);

  // Finished using the deoptimizer instance.
  delete deoptimizer;

  return info;
}


void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                                 Isolate* isolate) {
  CHECK_EQ(isolate->deoptimizer_data()->deoptimized_frame_info_, info);
  delete info;
  isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL;
}


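// Emits the table of |count| deoptimization entries for the given bailout
// type into |masm|; entry |id| starts at offset id * table_entry_size_.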
void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
                                                int count,
                                                BailoutType type) {
  TableEntryGenerator generator(masm, type, count);
  generator.Generate();
}


void Deoptimizer::VisitAllOptimizedFunctionsForContext(
    Context* context, OptimizedFunctionVisitor* visitor) {
  DisallowHeapAllocation no_allocation;

  CHECK(context->IsNativeContext());

  visitor->EnterContext(context);

  // Visit the list of optimized functions, removing elements that
  // no longer refer to optimized code.
  JSFunction* prev = NULL;
  Object* element = context->OptimizedFunctionsListHead();
  while (!element->IsUndefined()) {
    JSFunction* function = JSFunction::cast(element);
    Object* next = function->next_function_link();
    if (function->code()->kind() != Code::OPTIMIZED_FUNCTION ||
        (visitor->VisitFunction(function),
         function->code()->kind() != Code::OPTIMIZED_FUNCTION)) {
      // The function no longer refers to optimized code, or the visitor
      // changed the code to which it refers to no longer be optimized code.
      // Remove the function from this list.
      if (prev != NULL) {
        prev->set_next_function_link(next);
      } else {
        context->SetOptimizedFunctionsListHead(next);
      }
      // The visitor should not alter the link directly.
      CHECK_EQ(function->next_function_link(), next);
      // Set the next function link to undefined to indicate it is no longer
      // in the optimized functions list.
      function->set_next_function_link(context->GetHeap()->undefined_value());
    } else {
      // The visitor should not alter the link directly.
      CHECK_EQ(function->next_function_link(), next);
      // preserve this element.
      prev = function;
    }
    element = next;
  }

  visitor->LeaveContext(context);
}


void Deoptimizer::VisitAllOptimizedFunctions(
    Isolate* isolate,
    OptimizedFunctionVisitor* visitor) {
  DisallowHeapAllocation no_allocation;

  // Run through the list of all native contexts.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined()) {
    VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor);
    context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
  }
}


// Unlink functions referring to code marked for deoptimization, then move
// marked code from the optimized code list to the deoptimized code list,
// and patch code for lazy deopt.
void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
  DisallowHeapAllocation no_allocation;

  // A "closure" that unlinks optimized code that is going to be
  // deoptimized from the functions that refer to it.
  class SelectedCodeUnlinker: public OptimizedFunctionVisitor {
   public:
    virtual void EnterContext(Context* context) { }  // Don't care.
    virtual void LeaveContext(Context* context) { }  // Don't care.
    virtual void VisitFunction(JSFunction* function) {
      Code* code = function->code();
      if (!code->marked_for_deoptimization()) return;

      // Unlink this function and evict from optimized code map.
      SharedFunctionInfo* shared = function->shared();
      function->set_code(shared->code());

      if (FLAG_trace_deopt) {
        CodeTracer::Scope scope(code->GetHeap()->isolate()->GetCodeTracer());
        PrintF(scope.file(), "[deoptimizer unlinked: ");
        function->PrintName(scope.file());
        PrintF(scope.file(),
               " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
      }
    }
  };

  // Unlink all functions that refer to marked code.
  SelectedCodeUnlinker unlinker;
  VisitAllOptimizedFunctionsForContext(context, &unlinker);

  Isolate* isolate = context->GetHeap()->isolate();
#ifdef DEBUG
  Code* topmost_optimized_code = NULL;
  bool safe_to_deopt_topmost_optimized_code = false;
  // Make sure all activations of optimized code can deopt at their current PC.
  // The topmost optimized code has special handling because it cannot be
  // deoptimized due to weak object dependency.
  for (StackFrameIterator it(isolate, isolate->thread_local_top());
       !it.done(); it.Advance()) {
    StackFrame::Type type = it.frame()->type();
    if (type == StackFrame::OPTIMIZED) {
      Code* code = it.frame()->LookupCode();
      if (FLAG_trace_deopt) {
        JSFunction* function =
            static_cast<OptimizedFrame*>(it.frame())->function();
        CodeTracer::Scope scope(isolate->GetCodeTracer());
        PrintF(scope.file(), "[deoptimizer found activation of function: ");
        function->PrintName(scope.file());
        PrintF(scope.file(),
               " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
      }
      SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
      int deopt_index = safepoint.deoptimization_index();
      bool safe_to_deopt = deopt_index != Safepoint::kNoDeoptimizationIndex;
      CHECK(topmost_optimized_code == NULL || safe_to_deopt);
      if (topmost_optimized_code == NULL) {
        topmost_optimized_code = code;
        safe_to_deopt_topmost_optimized_code = safe_to_deopt;
      }
    }
  }
#endif

  // Move marked code from the optimized code list to the deoptimized
  // code list, collecting them into a ZoneList.
  Zone zone(isolate);
  ZoneList<Code*> codes(10, &zone);

  // Walk over all optimized code objects in this native context.
  Code* prev = NULL;
  Object* element = context->OptimizedCodeListHead();
  while (!element->IsUndefined()) {
    Code* code = Code::cast(element);
    CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
    Object* next = code->next_code_link();
    if (code->marked_for_deoptimization()) {
      // Put the code into the list for later patching.
      codes.Add(code, &zone);

      if (prev != NULL) {
        // Skip this code in the optimized code list.
        prev->set_next_code_link(next);
      } else {
        // There was no previous node, the next node is the new head.
        context->SetOptimizedCodeListHead(next);
      }

      // Move the code to the _deoptimized_ code list.
      code->set_next_code_link(context->DeoptimizedCodeListHead());
      context->SetDeoptimizedCodeListHead(code);
    } else {
      // Not marked; preserve this element.
      prev = code;
    }
    element = next;
  }

  // TODO(titzer): we need a handle scope only because of the macro assembler,
  // which is only used in EnsureCodeForDeoptimizationEntry.
  HandleScope scope(isolate);

  // Now patch all the codes for deoptimization.
  for (int i = 0; i < codes.length(); i++) {
#ifdef DEBUG
    if (codes[i] == topmost_optimized_code) {
      ASSERT(safe_to_deopt_topmost_optimized_code);
    }
#endif
    // It is finally time to die, code object.
    // Do platform-specific patching to force any activations to lazy deopt.
    PatchCodeForDeoptimization(isolate, codes[i]);

    // We might be in the middle of incremental marking with compaction.
    // Tell collector to treat this code object in a special way and
    // ignore all slots that might have been recorded on it.
    isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]);
  }
}


void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
  if (FLAG_trace_deopt) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[deoptimize all code in all contexts]\n");
  }
  DisallowHeapAllocation no_allocation;
  // For all contexts, mark all code, then deoptimize.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined()) {
    Context* native_context = Context::cast(context);
    MarkAllCodeForContext(native_context);
    DeoptimizeMarkedCodeForContext(native_context);
    context = native_context->get(Context::NEXT_CONTEXT_LINK);
  }
}


void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
  if (FLAG_trace_deopt) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[deoptimize marked code in all contexts]\n");
  }
  DisallowHeapAllocation no_allocation;
  // For all contexts, deoptimize code already marked.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined()) {
    Context* native_context = Context::cast(context);
    DeoptimizeMarkedCodeForContext(native_context);
    context = native_context->get(Context::NEXT_CONTEXT_LINK);
  }
}


void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) {
  if (FLAG_trace_deopt) {
    CodeTracer::Scope scope(object->GetHeap()->isolate()->GetCodeTracer());
    PrintF(scope.file(), "[deoptimize global object @ 0x%08" V8PRIxPTR "]\n",
           reinterpret_cast<intptr_t>(object));
  }
  if (object->IsJSGlobalProxy()) {
    Object* proto = object->GetPrototype();
    CHECK(proto->IsJSGlobalObject());
    Context* native_context = GlobalObject::cast(proto)->native_context();
    MarkAllCodeForContext(native_context);
    DeoptimizeMarkedCodeForContext(native_context);
  } else if (object->IsGlobalObject()) {
    Context* native_context = GlobalObject::cast(object)->native_context();
    MarkAllCodeForContext(native_context);
    DeoptimizeMarkedCodeForContext(native_context);
  }
}


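// Marks every code object on the optimized code list of the given native
// context for deoptimization. The actual unlinking and patching is done
// separately by DeoptimizeMarkedCodeForContext.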
void Deoptimizer::MarkAllCodeForContext(Context* context) {
  Object* element = context->OptimizedCodeListHead();
  while (!element->IsUndefined()) {
    Code* code = Code::cast(element);
    CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
    code->set_marked_for_deoptimization(true);
    element = code->next_code_link();
  }
}


void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
  Code* code = function->code();
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    // Mark the code for deoptimization and unlink any functions that also
    // refer to that code. The code cannot be shared across native contexts,
    // so we only need to search one.
    code->set_marked_for_deoptimization(true);
    DeoptimizeMarkedCodeForContext(function->context()->native_context());
  }
}


void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
  deoptimizer->DoComputeOutputFrames();
}


bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type,
                                  StackFrame::Type frame_type) {
  switch (deopt_type) {
    case EAGER:
    case SOFT:
    case LAZY:
    case DEBUGGER:
      return (frame_type == StackFrame::STUB)
          ? FLAG_trace_stub_failures
          : FLAG_trace_deopt;
  }
  FATAL("Unsupported deopt type");
  return false;
}


const char* Deoptimizer::MessageFor(BailoutType type) {
  switch (type) {
    case EAGER: return "eager";
    case SOFT: return "soft";
    case LAZY: return "lazy";
    case DEBUGGER: return "debugger";
  }
  FATAL("Unsupported deopt type");
  return NULL;
}


Deoptimizer::Deoptimizer(Isolate* isolate,
                         JSFunction* function,
                         BailoutType type,
                         unsigned bailout_id,
                         Address from,
                         int fp_to_sp_delta,
                         Code* optimized_code)
    : isolate_(isolate),
      function_(function),
      bailout_id_(bailout_id),
      bailout_type_(type),
      from_(from),
      fp_to_sp_delta_(fp_to_sp_delta),
      has_alignment_padding_(0),
      input_(NULL),
      output_count_(0),
      jsframe_count_(0),
      output_(NULL),
      deferred_objects_tagged_values_(0),
      deferred_objects_double_values_(0),
      deferred_objects_(0),
      deferred_heap_numbers_(0),
      jsframe_functions_(0),
      jsframe_has_adapted_arguments_(0),
      materialized_values_(NULL),
      materialized_objects_(NULL),
      materialization_value_index_(0),
      materialization_object_index_(0),
      trace_scope_(NULL) {
  // For COMPILED_STUBs called from builtins, the function pointer is a SMI
  // indicating an internal frame.
  if (function->IsSmi()) {
    function = NULL;
  }
  ASSERT(from != NULL);
  if (function != NULL && function->IsOptimized()) {
    function->shared()->increment_deopt_count();
    if (bailout_type_ == Deoptimizer::SOFT) {
      isolate->counters()->soft_deopts_executed()->Increment();
      // Soft deopts shouldn't count against the overall re-optimization count
      // that can eventually lead to disabling optimization for a function.
      int opt_count = function->shared()->opt_count();
      if (opt_count > 0) opt_count--;
      function->shared()->set_opt_count(opt_count);
    }
  }
  compiled_code_ = FindOptimizedCode(function, optimized_code);

#if DEBUG
  ASSERT(compiled_code_ != NULL);
  if (type == EAGER || type == SOFT || type == LAZY) {
    ASSERT(compiled_code_->kind() != Code::FUNCTION);
  }
#endif

  StackFrame::Type frame_type = function == NULL
      ? StackFrame::STUB
      : StackFrame::JAVA_SCRIPT;
  trace_scope_ = TraceEnabledFor(type, frame_type) ?
      new CodeTracer::Scope(isolate->GetCodeTracer()) : NULL;
#ifdef DEBUG
  CHECK(AllowHeapAllocation::IsAllowed());
  disallow_heap_allocation_ = new DisallowHeapAllocation();
#endif  // DEBUG
  unsigned size = ComputeInputFrameSize();
  input_ = new(size) FrameDescription(size, function);
  input_->SetFrameType(frame_type);
}


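// Determines the code object this deoptimization applies to. For regular
// bailouts it is located via the from_ address; for DEBUGGER bailouts the
// caller passes the optimized code in explicitly.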
Code* Deoptimizer::FindOptimizedCode(JSFunction* function,
                                     Code* optimized_code) {
  switch (bailout_type_) {
    case Deoptimizer::SOFT:
    case Deoptimizer::EAGER:
    case Deoptimizer::LAZY: {
      Code* compiled_code = FindDeoptimizingCode(from_);
      return (compiled_code == NULL)
          ? static_cast<Code*>(isolate_->FindCodeObject(from_))
          : compiled_code;
    }
    case Deoptimizer::DEBUGGER:
      ASSERT(optimized_code->contains(from_));
      return optimized_code;
  }
  FATAL("Could not find code for optimized function");
  return NULL;
}


void Deoptimizer::PrintFunctionName() {
  if (function_->IsJSFunction()) {
    function_->PrintName(trace_scope_->file());
  } else {
    PrintF(trace_scope_->file(),
           "%s", Code::Kind2String(compiled_code_->kind()));
  }
}


Deoptimizer::~Deoptimizer() {
  ASSERT(input_ == NULL && output_ == NULL);
  ASSERT(disallow_heap_allocation_ == NULL);
  delete trace_scope_;
}


void Deoptimizer::DeleteFrameDescriptions() {
  delete input_;
  for (int i = 0; i < output_count_; ++i) {
    if (output_[i] != input_) delete output_[i];
  }
  delete[] output_;
  input_ = NULL;
  output_ = NULL;
#ifdef DEBUG
  CHECK(!AllowHeapAllocation::IsAllowed());
  CHECK(disallow_heap_allocation_ != NULL);
  delete disallow_heap_allocation_;
  disallow_heap_allocation_ = NULL;
#endif  // DEBUG
}


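// Returns the address of deoptimization entry |id| for the given bailout
// type, i.e. table base + id * table_entry_size_. With ENSURE_ENTRY_CODE the
// entry table is generated first if it does not exist yet.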
Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
                                            int id,
                                            BailoutType type,
                                            GetEntryMode mode) {
  CHECK_GE(id, 0);
  if (id >= kMaxNumberOfEntries) return NULL;
  if (mode == ENSURE_ENTRY_CODE) {
    EnsureCodeForDeoptimizationEntry(isolate, type, id);
  } else {
    CHECK_EQ(mode, CALCULATE_ENTRY_ADDRESS);
  }
  DeoptimizerData* data = isolate->deoptimizer_data();
  CHECK_LT(type, kBailoutTypesWithCodeEntry);
  MemoryChunk* base = data->deopt_entry_code_[type];
  return base->area_start() + (id * table_entry_size_);
}


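// The inverse of GetDeoptimizationEntry: maps an address inside the entry
// table of the given bailout type back to its entry id, or returns
// kNotDeoptimizationEntry if the address is outside the table.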
int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
                                     Address addr,
                                     BailoutType type) {
  DeoptimizerData* data = isolate->deoptimizer_data();
  MemoryChunk* base = data->deopt_entry_code_[type];
  Address start = base->area_start();
  if (base == NULL ||
      addr < start ||
      addr >= start + (kMaxNumberOfEntries * table_entry_size_)) {
    return kNotDeoptimizationEntry;
  }
  ASSERT_EQ(0,
            static_cast<int>(addr - start) % table_entry_size_);
  return static_cast<int>(addr - start) / table_entry_size_;
}


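// Looks up the packed pc-offset/state word recorded for the given bailout id
// in the unoptimized code's DeoptimizationOutputData; aborts with diagnostics
// if no entry exists.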
int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
                               BailoutId id,
                               SharedFunctionInfo* shared) {
  // TODO(kasperl): For now, we do a simple linear search for the PC
  // offset associated with the given node id. This should probably be
  // changed to a binary search.
  int length = data->DeoptPoints();
  for (int i = 0; i < length; i++) {
    if (data->AstId(i) == id) {
      return data->PcAndState(i)->value();
    }
  }
  PrintF(stderr, "[couldn't find pc offset for node=%d]\n", id.ToInt());
  PrintF(stderr, "[method: %s]\n", shared->DebugName()->ToCString().get());
  // Print the source code if available.
  HeapStringAllocator string_allocator;
  StringStream stream(&string_allocator);
  shared->SourceCodePrint(&stream, -1);
  PrintF(stderr, "[source:\n%s\n]", stream.ToCString().get());

  FATAL("unable to find pc offset during deoptimization");
  return -1;
}


int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
  int length = 0;
  // Count all entries in the deoptimizing code list of every context.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined()) {
    Context* native_context = Context::cast(context);
    Object* element = native_context->DeoptimizedCodeListHead();
    while (!element->IsUndefined()) {
      Code* code = Code::cast(element);
      ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
      length++;
      element = code->next_code_link();
    }
    context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
  }
  return length;
}


// We rely on this function not causing a GC. It is called from generated code
// without having a real stack frame in place.
void Deoptimizer::DoComputeOutputFrames() {
  // Print some helpful diagnostic information.
  if (FLAG_log_timer_events &&
      compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
    LOG(isolate(), CodeDeoptEvent(compiled_code_));
  }
  ElapsedTimer timer;

  // Determine basic deoptimization information. The optimized frame is
  // described by the input data.
  DeoptimizationInputData* input_data =
      DeoptimizationInputData::cast(compiled_code_->deoptimization_data());

  if (trace_scope_ != NULL) {
    timer.Start();
    PrintF(trace_scope_->file(),
           "[deoptimizing (DEOPT %s): begin 0x%08" V8PRIxPTR " ",
           MessageFor(bailout_type_),
           reinterpret_cast<intptr_t>(function_));
    PrintFunctionName();
    PrintF(trace_scope_->file(),
           " (opt #%d) @%d, FP to SP delta: %d]\n",
           input_data->OptimizationId()->value(),
           bailout_id_,
           fp_to_sp_delta_);
    if (bailout_type_ == EAGER || bailout_type_ == SOFT) {
      compiled_code_->PrintDeoptLocation(trace_scope_->file(), bailout_id_);
    }
  }

  BailoutId node_id = input_data->AstId(bailout_id_);
  ByteArray* translations = input_data->TranslationByteArray();
  unsigned translation_index =
      input_data->TranslationIndex(bailout_id_)->value();

  // Do the input frame to output frame(s) translation.
  TranslationIterator iterator(translations, translation_index);
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator.Next());
  ASSERT(Translation::BEGIN == opcode);
  USE(opcode);
  // Read the number of output frames and allocate an array for their
  // descriptions.
  int count = iterator.Next();
  iterator.Next();  // Drop JS frames count.
  ASSERT(output_ == NULL);
  output_ = new FrameDescription*[count];
  for (int i = 0; i < count; ++i) {
    output_[i] = NULL;
  }
  output_count_ = count;

  Register fp_reg = JavaScriptFrame::fp_register();
  stack_fp_ = reinterpret_cast<Address>(
      input_->GetRegister(fp_reg.code()) +
      has_alignment_padding_ * kPointerSize);

  // Translate each output frame.
  for (int i = 0; i < count; ++i) {
    // Read the ast node id, function, and frame height for this output frame.
    Translation::Opcode opcode =
        static_cast<Translation::Opcode>(iterator.Next());
    switch (opcode) {
      case Translation::JS_FRAME:
        DoComputeJSFrame(&iterator, i);
        jsframe_count_++;
        break;
      case Translation::ARGUMENTS_ADAPTOR_FRAME:
        DoComputeArgumentsAdaptorFrame(&iterator, i);
        break;
      case Translation::CONSTRUCT_STUB_FRAME:
        DoComputeConstructStubFrame(&iterator, i);
        break;
      case Translation::GETTER_STUB_FRAME:
        DoComputeAccessorStubFrame(&iterator, i, false);
        break;
      case Translation::SETTER_STUB_FRAME:
        DoComputeAccessorStubFrame(&iterator, i, true);
        break;
      case Translation::COMPILED_STUB_FRAME:
        DoComputeCompiledStubFrame(&iterator, i);
        break;
      case Translation::BEGIN:
      case Translation::REGISTER:
      case Translation::INT32_REGISTER:
      case Translation::UINT32_REGISTER:
      case Translation::DOUBLE_REGISTER:
      case Translation::STACK_SLOT:
      case Translation::INT32_STACK_SLOT:
      case Translation::UINT32_STACK_SLOT:
      case Translation::DOUBLE_STACK_SLOT:
      case Translation::LITERAL:
      case Translation::ARGUMENTS_OBJECT:
      default:
        FATAL("Unsupported translation");
        break;
    }
  }

  // Print some helpful diagnostic information.
  if (trace_scope_ != NULL) {
    double ms = timer.Elapsed().InMillisecondsF();
    int index = output_count_ - 1;  // Index of the topmost frame.
    JSFunction* function = output_[index]->GetFunction();
    PrintF(trace_scope_->file(),
           "[deoptimizing (%s): end 0x%08" V8PRIxPTR " ",
           MessageFor(bailout_type_),
           reinterpret_cast<intptr_t>(function));
    PrintFunctionName();
    PrintF(trace_scope_->file(),
           " @%d => node=%d, pc=0x%08" V8PRIxPTR ", state=%s, alignment=%s,"
           " took %0.3f ms]\n",
           bailout_id_,
           node_id.ToInt(),
           output_[index]->GetPc(),
           FullCodeGenerator::State2String(
               static_cast<FullCodeGenerator::State>(
                   output_[index]->GetState()->value())),
           has_alignment_padding_ ? "with padding" : "no padding",
           ms);
  }
}


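// Reconstructs a single unoptimized JavaScript frame: the translated incoming
// parameters, caller pc/fp (and constant pool if enabled), context, function
// and locals, followed by the pc, state and continuation at which the
// unoptimized code will resume.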
void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
                                   int frame_index) {
  BailoutId node_id = BailoutId(iterator->Next());
  JSFunction* function;
  if (frame_index != 0) {
    function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  } else {
    int closure_id = iterator->Next();
    USE(closure_id);
    CHECK_EQ(Translation::kSelfLiteralId, closure_id);
    function = function_;
  }
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(), " translating ");
    function->PrintName(trace_scope_->file());
    PrintF(trace_scope_->file(),
           " => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
  }

  // The 'fixed' part of the frame consists of the incoming parameters and
  // the part described by JavaScriptFrameConstants.
  unsigned fixed_frame_size = ComputeFixedSize(function);
  unsigned input_frame_size = input_->GetFrameSize();
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);

  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);
  CHECK(frame_index >= 0 && frame_index < output_count_);
  CHECK_EQ(output_[frame_index], NULL);
  output_[frame_index] = output_frame;

  // The top address for the bottommost output frame can be computed from
  // the input frame pointer and the output frame's height. For all
  // subsequent output frames, it can be computed from the previous one's
  // top address and the current frame's size.
  Register fp_reg = JavaScriptFrame::fp_register();
  intptr_t top_address;
  if (is_bottommost) {
    // Determine whether the input frame contains alignment padding.
    has_alignment_padding_ = HasAlignmentPadding(function) ? 1 : 0;
    // 2 = context and function in the frame.
    // If the optimized frame had alignment padding, adjust the frame pointer
    // to point to the new position of the old frame pointer after padding
    // is removed. Subtract 2 * kPointerSize for the context and function
    // slots.
    top_address = input_->GetRegister(fp_reg.code()) -
        StandardFrameConstants::kFixedFrameSizeFromFp -
        height_in_bytes + has_alignment_padding_ * kPointerSize;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = function->shared()->formal_parameter_count() + 1;
  unsigned output_offset = output_frame_size;
  unsigned input_offset = input_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }
  input_offset -= (parameter_count * kPointerSize);

  // There are no translation commands for the caller's pc and fp, the
  // context, and the function. Synthesize their values and set them up
  // explicitly.
  //
  // The caller's pc for the bottommost output frame is the same as in the
  // input frame. For all subsequent output frames, it can be read from the
  // previous one. This frame's pc can be computed from the non-optimized
  // function code and AST id of the bailout.
  output_offset -= kPCOnStackSize;
  input_offset -= kPCOnStackSize;
  intptr_t value;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetCallerPc(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_offset, output_offset, value);
  }

  // The caller's frame pointer for the bottommost output frame is the same
  // as in the input frame. For all subsequent output frames, it can be
  // read from the previous one. Also compute and set this frame's frame
  // pointer.
  output_offset -= kFPOnStackSize;
  input_offset -= kFPOnStackSize;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  ASSERT(!is_bottommost || (input_->GetRegister(fp_reg.code()) +
      has_alignment_padding_ * kPointerSize) == fp_value);
  output_frame->SetFp(fp_value);
  if (is_topmost) output_frame->SetRegister(fp_reg.code(), fp_value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           fp_value, output_offset, value);
  }
  ASSERT(!is_bottommost || !has_alignment_padding_ ||
         (fp_value & kPointerSize) != 0);

  if (FLAG_enable_ool_constant_pool) {
    // For the bottommost output frame the constant pool pointer can be gotten
    // from the input frame. For subsequent output frames, it can be read from
    // the previous frame.
    output_offset -= kPointerSize;
    input_offset -= kPointerSize;
    if (is_bottommost) {
      value = input_->GetFrameSlot(input_offset);
    } else {
      value = output_[frame_index - 1]->GetConstantPool();
    }
    output_frame->SetCallerConstantPool(output_offset, value);
    if (trace_scope_) {
      PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
             V8PRIxPTR "; caller's constant_pool\n",
             top_address + output_offset, output_offset, value);
    }
  }

  // For the bottommost output frame the context can be gotten from the input
  // frame. For all subsequent output frames it can be gotten from the function
  // so long as we don't inline functions that need local contexts.
  Register context_reg = JavaScriptFrame::context_register();
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = reinterpret_cast<intptr_t>(function->context());
  }
  output_frame->SetFrameSlot(output_offset, value);
  output_frame->SetContext(value);
  if (is_topmost) output_frame->SetRegister(context_reg.code(), value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR "; context\n",
           top_address + output_offset, output_offset, value);
  }

  // The function was mentioned explicitly in the BEGIN_FRAME.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  // The function for the bottommost output frame should also agree with the
  // input frame.
  ASSERT(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR "; function\n",
           top_address + output_offset, output_offset, value);
  }

  // Translate the rest of the frame.
  for (unsigned i = 0; i < height; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }
  CHECK_EQ(0, output_offset);

  // Compute this frame's PC, state, and continuation.
  Code* non_optimized_code = function->shared()->code();
  FixedArray* raw_data = non_optimized_code->deoptimization_data();
  DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
  Address start = non_optimized_code->instruction_start();
  unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
  unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
  intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
  output_frame->SetPc(pc_value);

  // Update constant pool.
  if (FLAG_enable_ool_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(non_optimized_code->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
    if (is_topmost) {
      Register constant_pool_reg =
          JavaScriptFrame::constant_pool_pointer_register();
      output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
    }
  }

  FullCodeGenerator::State state =
      FullCodeGenerator::StateField::decode(pc_and_state);
  output_frame->SetState(Smi::FromInt(state));

  // Set the continuation for the topmost frame.
  if (is_topmost && bailout_type_ != DEBUGGER) {
    Builtins* builtins = isolate_->builtins();
    Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
    if (bailout_type_ == LAZY) {
      continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
    } else if (bailout_type_ == SOFT) {
      continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
    } else {
      CHECK_EQ(bailout_type_, EAGER);
    }
    output_frame->SetContinuation(
        reinterpret_cast<intptr_t>(continuation->entry()));
  }
}


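// Reconstructs an arguments adaptor frame, which sits between an inlined
// callee and its caller when the call site passed a different number of
// arguments than the callee declares.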
void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                                 int frame_index) {
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " translating arguments adaptor => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);

  // Arguments adaptor can not be topmost or bottommost.
  CHECK(frame_index > 0 && frame_index < output_count_ - 1);
  CHECK(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous
  // frame's top and this frame's size.
  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  }

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  if (FLAG_enable_ool_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    value = output_[frame_index - 1]->GetConstantPool();
    output_frame->SetCallerConstantPool(output_offset, value);
    if (trace_scope_) {
      PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
             V8PRIxPTR "; caller's constant_pool\n",
             top_address + output_offset, output_offset, value);
    }
  }

  // A marker value is used in place of the context.
  output_offset -= kPointerSize;
  intptr_t context = reinterpret_cast<intptr_t>(
      Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  output_frame->SetFrameSlot(output_offset, context);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; context (adaptor sentinel)\n",
           top_address + output_offset, output_offset, context);
  }

  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; function\n",
           top_address + output_offset, output_offset, value);
  }

  // Number of incoming arguments.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; argc (%d)\n",
           top_address + output_offset, output_offset, value, height - 1);
  }

  ASSERT(0 == output_offset);

  Builtins* builtins = isolate_->builtins();
  Code* adaptor_trampoline =
      builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
  intptr_t pc_value = reinterpret_cast<intptr_t>(
      adaptor_trampoline->instruction_start() +
      isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
  output_frame->SetPc(pc_value);
  if (FLAG_enable_ool_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(adaptor_trampoline->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
  }
}


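// Reconstructs a construct stub frame, i.e. the frame the generic construct
// stub would have pushed for an inlined constructor call, including the
// allocated receiver and the argument count.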
void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
                                              int frame_index) {
  Builtins* builtins = isolate_->builtins();
  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " translating construct stub => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = ConstructFrameConstants::kFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::CONSTRUCT);

  // Construct stub can not be topmost or bottommost.
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous
  // frame's top and this frame's size.
  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    int deferred_object_index = deferred_objects_.length();
    DoTranslateCommand(iterator, frame_index, output_offset);
    // The allocated receiver of a construct stub frame is passed as the
    // receiver parameter through the translation. It might be encoding
    // a captured object, patch the slot address for a captured object.
    if (i == 0 && deferred_objects_.length() > deferred_object_index) {
      CHECK(!deferred_objects_[deferred_object_index].is_arguments());
      deferred_objects_[deferred_object_index].patch_slot_address(top_address);
    }
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  }

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  if (FLAG_enable_ool_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    value = output_[frame_index - 1]->GetConstantPool();
    output_frame->SetCallerConstantPool(output_offset, value);
    if (trace_scope_) {
      PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
             V8PRIxPTR " ; caller's constant pool\n",
             top_address + output_offset, output_offset, value);
    }
  }

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; context\n",
           top_address + output_offset, output_offset, value);
  }

  // A marker value is used in place of the function.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; function (construct sentinel)\n",
           top_address + output_offset, output_offset, value);
  }

  // The output frame reflects a JSConstructStubGeneric frame.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(construct_stub);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; code object\n",
           top_address + output_offset, output_offset, value);
  }

  // Number of incoming arguments.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; argc (%d)\n",
           top_address + output_offset, output_offset, value, height - 1);
  }

  // Constructor function being invoked by the stub (only present on some
  // architectures, indicated by kConstructorOffset).
  if (ConstructFrameConstants::kConstructorOffset != kMinInt) {
    output_offset -= kPointerSize;
    value = reinterpret_cast<intptr_t>(function);
    output_frame->SetFrameSlot(output_offset, value);
    if (trace_scope_ != NULL) {
      PrintF(trace_scope_->file(),
             " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
             V8PRIxPTR " ; constructor function\n",
             top_address + output_offset, output_offset, value);
    }
  }

  // The newly allocated object was passed as receiver in the artificial
  // constructor stub environment created by HEnvironment::CopyForInlining().
  output_offset -= kPointerSize;
  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; allocated receiver\n",
           top_address + output_offset, output_offset, value);
  }

  CHECK_EQ(0, output_offset);

  intptr_t pc = reinterpret_cast<intptr_t>(
      construct_stub->instruction_start() +
      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
  output_frame->SetPc(pc);
  if (FLAG_enable_ool_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(construct_stub->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
  }
}


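// Reconstructs the internal frame that stands in for an inlined getter or
// setter call; execution resumes in the corresponding *_ForDeopt builtin
// (LoadIC_Getter_ForDeopt or StoreIC_Setter_ForDeopt).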
DoComputeAccessorStubFrame(TranslationIterator * iterator,int frame_index,bool is_setter_stub_frame)1375 void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
1376 int frame_index,
1377 bool is_setter_stub_frame) {
1378 JSFunction* accessor = JSFunction::cast(ComputeLiteral(iterator->Next()));
1379 // The receiver (and the implicit return value, if any) are expected in
1380 // registers by the LoadIC/StoreIC, so they don't belong to the output stack
1381 // frame. This means that we have to use a height of 0.
1382 unsigned height = 0;
1383 unsigned height_in_bytes = height * kPointerSize;
1384 const char* kind = is_setter_stub_frame ? "setter" : "getter";
1385 if (trace_scope_ != NULL) {
1386 PrintF(trace_scope_->file(),
1387 " translating %s stub => height=%u\n", kind, height_in_bytes);
1388 }
1389
1390 // We need 1 stack entry for the return address and enough entries for the
1391 // StackFrame::INTERNAL (FP, context, frame type, code object and constant
1392 // pool (if FLAG_enable_ool_constant_pool)- see MacroAssembler::EnterFrame).
1393 // For a setter stub frame we need one additional entry for the implicit
1394 // return value, see StoreStubCompiler::CompileStoreViaSetter.
1395 unsigned fixed_frame_entries =
1396 (StandardFrameConstants::kFixedFrameSize / kPointerSize) + 1 +
1397 (is_setter_stub_frame ? 1 : 0);
1398 unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
1399 unsigned output_frame_size = height_in_bytes + fixed_frame_size;
1400
1401 // Allocate and store the output frame description.
1402 FrameDescription* output_frame =
1403 new(output_frame_size) FrameDescription(output_frame_size, accessor);
1404 output_frame->SetFrameType(StackFrame::INTERNAL);
1405
1406 // A frame for an accessor stub can not be the topmost or bottommost one.
1407 CHECK(frame_index > 0 && frame_index < output_count_ - 1);
1408 CHECK_EQ(output_[frame_index], NULL);
1409 output_[frame_index] = output_frame;
1410
1411 // The top address of the frame is computed from the previous frame's top and
1412 // this frame's size.
1413 intptr_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
1414 output_frame->SetTop(top_address);
1415
1416 unsigned output_offset = output_frame_size;
1417
1418 // Read caller's PC from the previous frame.
1419 output_offset -= kPCOnStackSize;
1420 intptr_t callers_pc = output_[frame_index - 1]->GetPc();
1421 output_frame->SetCallerPc(output_offset, callers_pc);
1422 if (trace_scope_ != NULL) {
1423 PrintF(trace_scope_->file(),
1424 " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1425 " ; caller's pc\n",
1426 top_address + output_offset, output_offset, callers_pc);
1427 }
1428
1429 // Read caller's FP from the previous frame, and set this frame's FP.
1430 output_offset -= kFPOnStackSize;
1431 intptr_t value = output_[frame_index - 1]->GetFp();
1432 output_frame->SetCallerFp(output_offset, value);
1433 intptr_t fp_value = top_address + output_offset;
1434 output_frame->SetFp(fp_value);
1435 if (trace_scope_ != NULL) {
1436 PrintF(trace_scope_->file(),
1437 " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1438 " ; caller's fp\n",
1439 fp_value, output_offset, value);
1440 }
1441
1442 if (FLAG_enable_ool_constant_pool) {
1443 // Read the caller's constant pool from the previous frame.
1444 output_offset -= kPointerSize;
1445 value = output_[frame_index - 1]->GetConstantPool();
1446 output_frame->SetCallerConstantPool(output_offset, value);
1447 if (trace_scope_) {
1448 PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1449 V8PRIxPTR " ; caller's constant pool\n",
1450 top_address + output_offset, output_offset, value);
1451 }
1452 }
1453
1454 // The context can be gotten from the previous frame.
1455 output_offset -= kPointerSize;
1456 value = output_[frame_index - 1]->GetContext();
1457 output_frame->SetFrameSlot(output_offset, value);
1458 if (trace_scope_ != NULL) {
1459 PrintF(trace_scope_->file(),
1460 " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1461 " ; context\n",
1462 top_address + output_offset, output_offset, value);
1463 }
1464
1465 // A marker value is used in place of the function.
1466 output_offset -= kPointerSize;
1467 value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
1468 output_frame->SetFrameSlot(output_offset, value);
1469 if (trace_scope_ != NULL) {
1470 PrintF(trace_scope_->file(),
1471 " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1472 " ; function (%s sentinel)\n",
1473 top_address + output_offset, output_offset, value, kind);
1474 }
1475
1476 // Get Code object from accessor stub.
1477 output_offset -= kPointerSize;
1478 Builtins::Name name = is_setter_stub_frame ?
1479 Builtins::kStoreIC_Setter_ForDeopt :
1480 Builtins::kLoadIC_Getter_ForDeopt;
1481 Code* accessor_stub = isolate_->builtins()->builtin(name);
1482 value = reinterpret_cast<intptr_t>(accessor_stub);
1483 output_frame->SetFrameSlot(output_offset, value);
1484 if (trace_scope_ != NULL) {
1485 PrintF(trace_scope_->file(),
1486 " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1487 " ; code object\n",
1488 top_address + output_offset, output_offset, value);
1489 }
1490
1491 // Skip receiver.
1492 DoTranslateObjectAndSkip(iterator);
1493
1494 if (is_setter_stub_frame) {
1495 // The implicit return value was part of the artificial setter stub
1496 // environment.
1497 output_offset -= kPointerSize;
1498 DoTranslateCommand(iterator, frame_index, output_offset);
1499 }
1500
1501 CHECK_EQ(output_offset, 0);
1502
1503 Smi* offset = is_setter_stub_frame ?
1504 isolate_->heap()->setter_stub_deopt_pc_offset() :
1505 isolate_->heap()->getter_stub_deopt_pc_offset();
1506 intptr_t pc = reinterpret_cast<intptr_t>(
1507 accessor_stub->instruction_start() + offset->value());
1508 output_frame->SetPc(pc);
1509 if (FLAG_enable_ool_constant_pool) {
1510 intptr_t constant_pool_value =
1511 reinterpret_cast<intptr_t>(accessor_stub->constant_pool());
1512 output_frame->SetConstantPool(constant_pool_value);
1513 }
1514 }
1515
1516
1517 void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
1518 int frame_index) {
1519 //
1520 // FROM TO
1521 // | .... | | .... |
1522 // +-------------------------+ +-------------------------+
1523 // | JSFunction continuation | | JSFunction continuation |
1524 // +-------------------------+ +-------------------------+
1525 // | | saved frame (FP) | | saved frame (FP) |
1526 // | +=========================+<-fpreg +=========================+<-fpreg
1527 // | |constant pool (if ool_cp)| |constant pool (if ool_cp)|
1528 // | +-------------------------+ +-------------------------+
1529 // | | JSFunction context | | JSFunction context |
1530 // v +-------------------------+ +-------------------------+
1531 // | COMPILED_STUB marker | | STUB_FAILURE marker |
1532 // +-------------------------+ +-------------------------+
1533 // | | | caller args.arguments_ |
1534 // | ... | +-------------------------+
1535 // | | | caller args.length_ |
1536 // |-------------------------|<-spreg +-------------------------+
1537 // | caller args pointer |
1538 // +-------------------------+
1539 // | caller stack param 1 |
1540 // parameters in registers +-------------------------+
1541 // and spilled to stack | .... |
1542 // +-------------------------+
1543 // | caller stack param n |
1544 // +-------------------------+<-spreg
1545 // reg = number of parameters
1546 // reg = failure handler address
1547 // reg = saved frame
1548 // reg = JSFunction context
1549 //
1550
1551 CHECK(compiled_code_->is_crankshafted() &&
1552 compiled_code_->kind() != Code::OPTIMIZED_FUNCTION);
1553 int major_key = compiled_code_->major_key();
1554 CodeStubInterfaceDescriptor* descriptor =
1555 isolate_->code_stub_interface_descriptor(major_key);
1556
1557 // The output frame must have room for all of the pushed register
1558 // parameters and the standard stack frame slots. Include space for an
1559 // Arguments object for the callee and, optionally, space to pass a
1560 // pointer to that object to the stub failure handler.
1561 CHECK_GE(descriptor->register_param_count_, 0);
1562 int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
1563 sizeof(Arguments) + kPointerSize;
1564 int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
1565 int input_frame_size = input_->GetFrameSize();
1566 int output_frame_size = height_in_bytes + fixed_frame_size;
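  // Illustrative arithmetic (editor's sketch, assuming a 64-bit build): a
  // stub with 3 register parameters gets height_in_bytes = 3 * 8 +
  // sizeof(Arguments) + 8, where sizeof(Arguments) pays for the args.length_
  // and args.arguments_ slots written further down and the trailing
  // kPointerSize pays for the caller-args pointer handed to the failure
  // handler.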
1567 if (trace_scope_ != NULL) {
1568 PrintF(trace_scope_->file(),
1569 " translating %s => StubFailureTrampolineStub, height=%d\n",
1570 CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
1571 height_in_bytes);
1572 }
1573
1574 // The stub failure trampoline is a single frame.
1575 FrameDescription* output_frame =
1576 new(output_frame_size) FrameDescription(output_frame_size, NULL);
1577 output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
1578 CHECK_EQ(frame_index, 0);
1579 output_[frame_index] = output_frame;
1580
1581 // The top address for the output frame can be computed from the input
1582 // frame pointer and the output frame's height. Subtract space for the
1583 // context and function slots.
1584 Register fp_reg = StubFailureTrampolineFrame::fp_register();
1585 intptr_t top_address = input_->GetRegister(fp_reg.code()) -
1586 StandardFrameConstants::kFixedFrameSizeFromFp - height_in_bytes;
1587 output_frame->SetTop(top_address);
1588
1589 // Read caller's PC (JSFunction continuation) from the input frame.
1590 unsigned input_frame_offset = input_frame_size - kPCOnStackSize;
1591 unsigned output_frame_offset = output_frame_size - kFPOnStackSize;
1592 intptr_t value = input_->GetFrameSlot(input_frame_offset);
1593 output_frame->SetCallerPc(output_frame_offset, value);
1594 if (trace_scope_ != NULL) {
1595 PrintF(trace_scope_->file(),
1596 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1597 V8PRIxPTR " ; caller's pc\n",
1598 top_address + output_frame_offset, output_frame_offset, value);
1599 }
1600
1601 // Read caller's FP from the input frame, and set this frame's FP.
1602 input_frame_offset -= kFPOnStackSize;
1603 value = input_->GetFrameSlot(input_frame_offset);
1604 output_frame_offset -= kFPOnStackSize;
1605 output_frame->SetCallerFp(output_frame_offset, value);
1606 intptr_t frame_ptr = input_->GetRegister(fp_reg.code());
1607 output_frame->SetRegister(fp_reg.code(), frame_ptr);
1608 output_frame->SetFp(frame_ptr);
1609 if (trace_scope_ != NULL) {
1610 PrintF(trace_scope_->file(),
1611 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1612 V8PRIxPTR " ; caller's fp\n",
1613 top_address + output_frame_offset, output_frame_offset, value);
1614 }
1615
1616 if (FLAG_enable_ool_constant_pool) {
1617 // Read the caller's constant pool from the input frame.
1618 input_frame_offset -= kPointerSize;
1619 value = input_->GetFrameSlot(input_frame_offset);
1620 output_frame_offset -= kPointerSize;
1621 output_frame->SetCallerConstantPool(output_frame_offset, value);
1622 if (trace_scope_ != NULL) {
1623 PrintF(trace_scope_->file(),
1624 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; caller's constant_pool\n",
1625 top_address + output_frame_offset, output_frame_offset, value);
1626 }
1627 }
1628
1629 // The context is read from the input frame.
1630 Register context_reg = StubFailureTrampolineFrame::context_register();
1631 input_frame_offset -= kPointerSize;
1632 value = input_->GetFrameSlot(input_frame_offset);
1633 output_frame->SetRegister(context_reg.code(), value);
1634 output_frame_offset -= kPointerSize;
1635 output_frame->SetFrameSlot(output_frame_offset, value);
1636 CHECK(reinterpret_cast<Object*>(value)->IsContext());
1637 if (trace_scope_ != NULL) {
1638 PrintF(trace_scope_->file(),
1639 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1640 V8PRIxPTR " ; context\n",
1641 top_address + output_frame_offset, output_frame_offset, value);
1642 }
1643
1644 // A marker value is used in place of the function.
1645 output_frame_offset -= kPointerSize;
1646 value = reinterpret_cast<intptr_t>(
1647 Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
1648 output_frame->SetFrameSlot(output_frame_offset, value);
1649 if (trace_scope_ != NULL) {
1650 PrintF(trace_scope_->file(),
1651 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1652 V8PRIxPTR " ; function (stub failure sentinel)\n",
1653 top_address + output_frame_offset, output_frame_offset, value);
1654 }
1655
1656 intptr_t caller_arg_count = 0;
1657 bool arg_count_known = !descriptor->stack_parameter_count_.is_valid();
1658
1659 // Build the Arguments object for the caller's parameters and a pointer to it.
1660 output_frame_offset -= kPointerSize;
1661 int args_arguments_offset = output_frame_offset;
1662 intptr_t the_hole = reinterpret_cast<intptr_t>(
1663 isolate_->heap()->the_hole_value());
1664 if (arg_count_known) {
1665 value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
1666 (caller_arg_count - 1) * kPointerSize;
1667 } else {
1668 value = the_hole;
1669 }
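  // Editor's note (sketch): when the argument count is statically known, the
  // stored pointer addresses the highest caller argument slot
  // (frame_ptr + kCallerSPOffset + (count - 1) * kPointerSize), so the
  // Arguments helper can index the arguments downwards in memory; when the
  // count only arrives in a register, the hole is written for now and both
  // args fields are patched below once the register parameters have been
  // translated.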
1670
1671 output_frame->SetFrameSlot(args_arguments_offset, value);
1672 if (trace_scope_ != NULL) {
1673 PrintF(trace_scope_->file(),
1674 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1675 V8PRIxPTR " ; args.arguments %s\n",
1676 top_address + args_arguments_offset, args_arguments_offset, value,
1677 arg_count_known ? "" : "(the hole)");
1678 }
1679
1680 output_frame_offset -= kPointerSize;
1681 int length_frame_offset = output_frame_offset;
1682 value = arg_count_known ? caller_arg_count : the_hole;
1683 output_frame->SetFrameSlot(length_frame_offset, value);
1684 if (trace_scope_ != NULL) {
1685 PrintF(trace_scope_->file(),
1686 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1687 V8PRIxPTR " ; args.length %s\n",
1688 top_address + length_frame_offset, length_frame_offset, value,
1689 arg_count_known ? "" : "(the hole)");
1690 }
1691
1692 output_frame_offset -= kPointerSize;
1693 value = frame_ptr + StandardFrameConstants::kCallerSPOffset -
1694 (output_frame_size - output_frame_offset) + kPointerSize;
1695 output_frame->SetFrameSlot(output_frame_offset, value);
1696 if (trace_scope_ != NULL) {
1697 PrintF(trace_scope_->file(),
1698 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1699 V8PRIxPTR " ; args*\n",
1700 top_address + output_frame_offset, output_frame_offset, value);
1701 }
1702
1703 // Copy the register parameters to the failure frame.
1704 int arguments_length_offset = -1;
1705 for (int i = 0; i < descriptor->register_param_count_; ++i) {
1706 output_frame_offset -= kPointerSize;
1707 DoTranslateCommand(iterator, 0, output_frame_offset);
1708
1709 if (!arg_count_known && descriptor->IsParameterCountRegister(i)) {
1710 arguments_length_offset = output_frame_offset;
1711 }
1712 }
1713
1714 CHECK_EQ(output_frame_offset, 0);
1715
1716 if (!arg_count_known) {
1717 CHECK_GE(arguments_length_offset, 0);
1718 // We know it's a smi because 1) the code stub guarantees the stack
1719 // parameter count is in smi range, and 2) the DoTranslateCommand in the
1720 // parameter loop above translated that to a tagged value.
1721 Smi* smi_caller_arg_count = reinterpret_cast<Smi*>(
1722 output_frame->GetFrameSlot(arguments_length_offset));
1723 caller_arg_count = smi_caller_arg_count->value();
1724 output_frame->SetFrameSlot(length_frame_offset, caller_arg_count);
1725 if (trace_scope_ != NULL) {
1726 PrintF(trace_scope_->file(),
1727 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1728 V8PRIxPTR " ; args.length\n",
1729 top_address + length_frame_offset, length_frame_offset,
1730 caller_arg_count);
1731 }
1732 value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
1733 (caller_arg_count - 1) * kPointerSize;
1734 output_frame->SetFrameSlot(args_arguments_offset, value);
1735 if (trace_scope_ != NULL) {
1736 PrintF(trace_scope_->file(),
1737 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1738 V8PRIxPTR " ; args.arguments\n",
1739 top_address + args_arguments_offset, args_arguments_offset,
1740 value);
1741 }
1742 }
1743
1744 // Copy the double registers from the input into the output frame.
1745 CopyDoubleRegisters(output_frame);
1746
1747 // Fill registers containing handler and number of parameters.
1748 SetPlatformCompiledStubRegisters(output_frame, descriptor);
1749
1750 // Compute this frame's PC, state, and continuation.
1751 Code* trampoline = NULL;
1752 StubFunctionMode function_mode = descriptor->function_mode_;
1753 StubFailureTrampolineStub(isolate_,
1754 function_mode).FindCodeInCache(&trampoline);
1755 ASSERT(trampoline != NULL);
1756 output_frame->SetPc(reinterpret_cast<intptr_t>(
1757 trampoline->instruction_start()));
1758 if (FLAG_enable_ool_constant_pool) {
1759 Register constant_pool_reg =
1760 StubFailureTrampolineFrame::constant_pool_pointer_register();
1761 intptr_t constant_pool_value =
1762 reinterpret_cast<intptr_t>(trampoline->constant_pool());
1763 output_frame->SetConstantPool(constant_pool_value);
1764 output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
1765 }
1766 output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
1767 Code* notify_failure =
1768 isolate_->builtins()->builtin(Builtins::kNotifyStubFailureSaveDoubles);
1769 output_frame->SetContinuation(
1770 reinterpret_cast<intptr_t>(notify_failure->entry()));
1771 }
1772
1773
1774 Handle<Object> Deoptimizer::MaterializeNextHeapObject() {
1775 int object_index = materialization_object_index_++;
1776 ObjectMaterializationDescriptor desc = deferred_objects_[object_index];
1777 const int length = desc.object_length();
1778
1779 if (desc.duplicate_object() >= 0) {
1780 // Found a previously materialized object by de-duplication.
1781 object_index = desc.duplicate_object();
1782 materialized_objects_->Add(Handle<Object>());
1783 } else if (desc.is_arguments() && ArgumentsObjectIsAdapted(object_index)) {
1784 // Use the arguments adapter frame we just built to materialize the
1785 // arguments object. FunctionGetArguments can't throw an exception.
1786 Handle<JSFunction> function = ArgumentsObjectFunction(object_index);
1787 Handle<JSObject> arguments = Handle<JSObject>::cast(
1788 Accessors::FunctionGetArguments(function));
1789 materialized_objects_->Add(arguments);
1790 // To keep consistent object counters, we still materialize the
1791 // nested values (but we throw them away).
1792 for (int i = 0; i < length; ++i) {
1793 MaterializeNextValue();
1794 }
1795 } else if (desc.is_arguments()) {
1796 // Construct an arguments object and copy the parameters to a newly
1797 // allocated arguments object backing store.
1798 Handle<JSFunction> function = ArgumentsObjectFunction(object_index);
1799 Handle<JSObject> arguments =
1800 isolate_->factory()->NewArgumentsObject(function, length);
1801 Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length);
1802 ASSERT_EQ(array->length(), length);
1803 arguments->set_elements(*array);
1804 materialized_objects_->Add(arguments);
1805 for (int i = 0; i < length; ++i) {
1806 Handle<Object> value = MaterializeNextValue();
1807 array->set(i, *value);
1808 }
1809 } else {
1810 // Dispatch on the instance type of the object to be materialized.
1811 // We also need to make sure that the representations of all fields
1812 // in the given object are general enough to hold a tagged value.
1813 Handle<Map> map = Map::GeneralizeAllFieldRepresentations(
1814 Handle<Map>::cast(MaterializeNextValue()));
1815 switch (map->instance_type()) {
1816 case HEAP_NUMBER_TYPE: {
1817 // Reuse the HeapNumber value directly as it is already properly
1818 // tagged and skip materializing the HeapNumber explicitly.
1819 Handle<Object> object = MaterializeNextValue();
1820 if (object_index < prev_materialized_count_) {
1821 materialized_objects_->Add(Handle<Object>(
1822 previously_materialized_objects_->get(object_index), isolate_));
1823 } else {
1824 materialized_objects_->Add(object);
1825 }
1826 materialization_value_index_ += kDoubleSize / kPointerSize - 1;
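        // Editor's note: this skip is kDoubleSize / kPointerSize - 1, i.e.
        // 8 / 4 - 1 == 1 extra value on a 32-bit build (a double spans two
        // tagged slots in the translation) and 8 / 8 - 1 == 0 on a 64-bit
        // build, which keeps the value index in step with the stream.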
1827 break;
1828 }
1829 case JS_OBJECT_TYPE: {
1830 Handle<JSObject> object =
1831 isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED, false);
1832 if (object_index < prev_materialized_count_) {
1833 materialized_objects_->Add(Handle<Object>(
1834 previously_materialized_objects_->get(object_index), isolate_));
1835 } else {
1836 materialized_objects_->Add(object);
1837 }
1838 Handle<Object> properties = MaterializeNextValue();
1839 Handle<Object> elements = MaterializeNextValue();
1840 object->set_properties(FixedArray::cast(*properties));
1841 object->set_elements(FixedArrayBase::cast(*elements));
1842 for (int i = 0; i < length - 3; ++i) {
1843 Handle<Object> value = MaterializeNextValue();
1844 FieldIndex index = FieldIndex::ForPropertyIndex(object->map(), i);
1845 object->FastPropertyAtPut(index, *value);
1846 }
1847 break;
1848 }
1849 case JS_ARRAY_TYPE: {
1850 Handle<JSArray> object =
1851 isolate_->factory()->NewJSArray(0, map->elements_kind());
1852 if (object_index < prev_materialized_count_) {
1853 materialized_objects_->Add(Handle<Object>(
1854 previously_materialized_objects_->get(object_index), isolate_));
1855 } else {
1856 materialized_objects_->Add(object);
1857 }
1858 Handle<Object> properties = MaterializeNextValue();
1859 Handle<Object> elements = MaterializeNextValue();
1860 Handle<Object> length = MaterializeNextValue();
1861 object->set_properties(FixedArray::cast(*properties));
1862 object->set_elements(FixedArrayBase::cast(*elements));
1863 object->set_length(*length);
1864 break;
1865 }
1866 default:
1867 PrintF(stderr,
1868 "[couldn't handle instance type %d]\n", map->instance_type());
1869 FATAL("Unsupported instance type");
1870 }
1871 }
1872
1873 return materialized_objects_->at(object_index);
1874 }
1875
1876
1877 Handle<Object> Deoptimizer::MaterializeNextValue() {
1878 int value_index = materialization_value_index_++;
1879 Handle<Object> value = materialized_values_->at(value_index);
1880 if (*value == isolate_->heap()->arguments_marker()) {
1881 value = MaterializeNextHeapObject();
1882 }
1883 return value;
1884 }
1885
1886
1887 void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) {
1888 ASSERT_NE(DEBUGGER, bailout_type_);
1889
1890 MaterializedObjectStore* materialized_store =
1891 isolate_->materialized_object_store();
1892 previously_materialized_objects_ = materialized_store->Get(stack_fp_);
1893 prev_materialized_count_ = previously_materialized_objects_.is_null() ?
1894 0 : previously_materialized_objects_->length();
1895
1896 // Walk all JavaScript output frames with the given frame iterator.
1897 for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) {
1898 if (frame_index != 0) it->Advance();
1899 JavaScriptFrame* frame = it->frame();
1900 jsframe_functions_.Add(handle(frame->function(), isolate_));
1901 jsframe_has_adapted_arguments_.Add(frame->has_adapted_arguments());
1902 }
1903
1904 // Handlify all tagged object values before triggering any allocation.
1905 List<Handle<Object> > values(deferred_objects_tagged_values_.length());
1906 for (int i = 0; i < deferred_objects_tagged_values_.length(); ++i) {
1907 values.Add(Handle<Object>(deferred_objects_tagged_values_[i], isolate_));
1908 }
1909
1910 // Play it safe and clear all unhandlified values before we continue.
1911 deferred_objects_tagged_values_.Clear();
1912
1913 // Materialize all heap numbers before looking at arguments because when the
1914 // output frames are used to materialize arguments objects later on they need
1915 // to already contain valid heap numbers.
1916 for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
1917 HeapNumberMaterializationDescriptor<Address> d = deferred_heap_numbers_[i];
1918 Handle<Object> num = isolate_->factory()->NewNumber(d.value());
1919 if (trace_scope_ != NULL) {
1920 PrintF(trace_scope_->file(),
1921 "Materialized a new heap number %p [%e] in slot %p\n",
1922 reinterpret_cast<void*>(*num),
1923 d.value(),
1924 d.destination());
1925 }
1926 Memory::Object_at(d.destination()) = *num;
1927 }
1928
1929 // Materialize all heap numbers required for arguments/captured objects.
1930 for (int i = 0; i < deferred_objects_double_values_.length(); i++) {
1931 HeapNumberMaterializationDescriptor<int> d =
1932 deferred_objects_double_values_[i];
1933 Handle<Object> num = isolate_->factory()->NewNumber(d.value());
1934 if (trace_scope_ != NULL) {
1935 PrintF(trace_scope_->file(),
1936 "Materialized a new heap number %p [%e] for object at %d\n",
1937 reinterpret_cast<void*>(*num),
1938 d.value(),
1939 d.destination());
1940 }
1941 ASSERT(values.at(d.destination())->IsTheHole());
1942 values.Set(d.destination(), num);
1943 }
1944
1945 // Play it safe and clear all object double values before we continue.
1946 deferred_objects_double_values_.Clear();
1947
1948 // Materialize arguments/captured objects.
1949 if (!deferred_objects_.is_empty()) {
1950 List<Handle<Object> > materialized_objects(deferred_objects_.length());
1951 materialized_objects_ = &materialized_objects;
1952 materialized_values_ = &values;
1953
1954 while (materialization_object_index_ < deferred_objects_.length()) {
1955 int object_index = materialization_object_index_;
1956 ObjectMaterializationDescriptor descriptor =
1957 deferred_objects_.at(object_index);
1958
1959 // Find a previously materialized object by de-duplication or
1960 // materialize a new instance of the object if necessary. Store
1961 // the materialized object into the frame slot.
1962 Handle<Object> object = MaterializeNextHeapObject();
1963 if (descriptor.slot_address() != NULL) {
1964 Memory::Object_at(descriptor.slot_address()) = *object;
1965 }
1966 if (trace_scope_ != NULL) {
1967 if (descriptor.is_arguments()) {
1968 PrintF(trace_scope_->file(),
1969 "Materialized %sarguments object of length %d for %p: ",
1970 ArgumentsObjectIsAdapted(object_index) ? "(adapted) " : "",
1971 Handle<JSObject>::cast(object)->elements()->length(),
1972 reinterpret_cast<void*>(descriptor.slot_address()));
1973 } else {
1974 PrintF(trace_scope_->file(),
1975 "Materialized captured object of size %d for %p: ",
1976 Handle<HeapObject>::cast(object)->Size(),
1977 reinterpret_cast<void*>(descriptor.slot_address()));
1978 }
1979 object->ShortPrint(trace_scope_->file());
1980 PrintF(trace_scope_->file(), "\n");
1981 }
1982 }
1983
1984 CHECK_EQ(materialization_object_index_, materialized_objects_->length());
1985 CHECK_EQ(materialization_value_index_, materialized_values_->length());
1986 }
1987
1988 if (prev_materialized_count_ > 0) {
1989 materialized_store->Remove(stack_fp_);
1990 }
1991 }
1992
1993
1994 void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame(
1995 Address parameters_top,
1996 uint32_t parameters_size,
1997 Address expressions_top,
1998 uint32_t expressions_size,
1999 DeoptimizedFrameInfo* info) {
2000 CHECK_EQ(DEBUGGER, bailout_type_);
2001 Address parameters_bottom = parameters_top + parameters_size;
2002 Address expressions_bottom = expressions_top + expressions_size;
2003 for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
2004 HeapNumberMaterializationDescriptor<Address> d = deferred_heap_numbers_[i];
2005
2006 // Check whether the heap number to materialize actually belongs to the
2007 // frame being extracted.
2008 Address slot = d.destination();
2009 if (parameters_top <= slot && slot < parameters_bottom) {
2010 Handle<Object> num = isolate_->factory()->NewNumber(d.value());
2011
2012 int index = (info->parameters_count() - 1) -
2013 static_cast<int>(slot - parameters_top) / kPointerSize;
2014
2015 if (trace_scope_ != NULL) {
2016 PrintF(trace_scope_->file(),
2017 "Materializing a new heap number %p [%e] in slot %p"
2018 "for parameter slot #%d\n",
2019 reinterpret_cast<void*>(*num),
2020 d.value(),
2021 d.destination(),
2022 index);
2023 }
2024
2025 info->SetParameter(index, *num);
2026 } else if (expressions_top <= slot && slot < expressions_bottom) {
2027 Handle<Object> num = isolate_->factory()->NewNumber(d.value());
2028
2029 int index = info->expression_count() - 1 -
2030 static_cast<int>(slot - expressions_top) / kPointerSize;
2031
2032 if (trace_scope_ != NULL) {
2033 PrintF(trace_scope_->file(),
2034 "Materializing a new heap number %p [%e] in slot %p"
2035 "for expression slot #%d\n",
2036 reinterpret_cast<void*>(*num),
2037 d.value(),
2038 d.destination(),
2039 index);
2040 }
2041
2042 info->SetExpression(index, *num);
2043 }
2044 }
2045 }
2046
2047
2048 static const char* TraceValueType(bool is_smi) {
2049 if (is_smi) {
2050 return "smi";
2051 }
2052
2053 return "heap number";
2054 }
2055
2056
2057 void Deoptimizer::DoTranslateObjectAndSkip(TranslationIterator* iterator) {
2058 Translation::Opcode opcode =
2059 static_cast<Translation::Opcode>(iterator->Next());
2060
2061 switch (opcode) {
2062 case Translation::BEGIN:
2063 case Translation::JS_FRAME:
2064 case Translation::ARGUMENTS_ADAPTOR_FRAME:
2065 case Translation::CONSTRUCT_STUB_FRAME:
2066 case Translation::GETTER_STUB_FRAME:
2067 case Translation::SETTER_STUB_FRAME:
2068 case Translation::COMPILED_STUB_FRAME: {
2069 FATAL("Unexpected frame start translation opcode");
2070 return;
2071 }
2072
2073 case Translation::REGISTER:
2074 case Translation::INT32_REGISTER:
2075 case Translation::UINT32_REGISTER:
2076 case Translation::DOUBLE_REGISTER:
2077 case Translation::STACK_SLOT:
2078 case Translation::INT32_STACK_SLOT:
2079 case Translation::UINT32_STACK_SLOT:
2080 case Translation::DOUBLE_STACK_SLOT:
2081 case Translation::LITERAL: {
2082 // The value is not part of any materialized object, so we can ignore it.
2083 iterator->Skip(Translation::NumberOfOperandsFor(opcode));
2084 return;
2085 }
2086
2087 case Translation::DUPLICATED_OBJECT: {
2088 int object_index = iterator->Next();
2089 if (trace_scope_ != NULL) {
2090 PrintF(trace_scope_->file(), " skipping object ");
2091 PrintF(trace_scope_->file(),
2092 " ; duplicate of object #%d\n", object_index);
2093 }
2094 AddObjectDuplication(0, object_index);
2095 return;
2096 }
2097
2098 case Translation::ARGUMENTS_OBJECT:
2099 case Translation::CAPTURED_OBJECT: {
2100 int length = iterator->Next();
2101 bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
2102 if (trace_scope_ != NULL) {
2103 PrintF(trace_scope_->file(), " skipping object ");
2104 PrintF(trace_scope_->file(),
2105 " ; object (length = %d, is_args = %d)\n", length, is_args);
2106 }
2107
2108 AddObjectStart(0, length, is_args);
2109
2110 // We save the object values on the side and materialize the actual
2111 // object after the deoptimized frame is built.
2112 int object_index = deferred_objects_.length() - 1;
2113 for (int i = 0; i < length; i++) {
2114 DoTranslateObject(iterator, object_index, i);
2115 }
2116 return;
2117 }
2118 }
2119
2120 FATAL("Unexpected translation opcode");
2121 }
2122
2123
2124 void Deoptimizer::DoTranslateObject(TranslationIterator* iterator,
2125 int object_index,
2126 int field_index) {
2127 disasm::NameConverter converter;
2128 Address object_slot = deferred_objects_[object_index].slot_address();
2129
2130 Translation::Opcode opcode =
2131 static_cast<Translation::Opcode>(iterator->Next());
2132
2133 switch (opcode) {
2134 case Translation::BEGIN:
2135 case Translation::JS_FRAME:
2136 case Translation::ARGUMENTS_ADAPTOR_FRAME:
2137 case Translation::CONSTRUCT_STUB_FRAME:
2138 case Translation::GETTER_STUB_FRAME:
2139 case Translation::SETTER_STUB_FRAME:
2140 case Translation::COMPILED_STUB_FRAME:
2141 FATAL("Unexpected frame start translation opcode");
2142 return;
2143
2144 case Translation::REGISTER: {
2145 int input_reg = iterator->Next();
2146 intptr_t input_value = input_->GetRegister(input_reg);
2147 if (trace_scope_ != NULL) {
2148 PrintF(trace_scope_->file(),
2149 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2150 reinterpret_cast<intptr_t>(object_slot),
2151 field_index);
2152 PrintF(trace_scope_->file(),
2153 "0x%08" V8PRIxPTR " ; %s ", input_value,
2154 converter.NameOfCPURegister(input_reg));
2155 reinterpret_cast<Object*>(input_value)->ShortPrint(
2156 trace_scope_->file());
2157 PrintF(trace_scope_->file(),
2158 "\n");
2159 }
2160 AddObjectTaggedValue(input_value);
2161 return;
2162 }
2163
2164 case Translation::INT32_REGISTER: {
2165 int input_reg = iterator->Next();
2166 intptr_t value = input_->GetRegister(input_reg);
2167 bool is_smi = Smi::IsValid(value);
2168 if (trace_scope_ != NULL) {
2169 PrintF(trace_scope_->file(),
2170 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2171 reinterpret_cast<intptr_t>(object_slot),
2172 field_index);
2173 PrintF(trace_scope_->file(),
2174 "%" V8PRIdPTR " ; %s (%s)\n", value,
2175 converter.NameOfCPURegister(input_reg),
2176 TraceValueType(is_smi));
2177 }
2178 if (is_smi) {
2179 intptr_t tagged_value =
2180 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2181 AddObjectTaggedValue(tagged_value);
2182 } else {
2183 double double_value = static_cast<double>(static_cast<int32_t>(value));
2184 AddObjectDoubleValue(double_value);
2185 }
2186 return;
2187 }
2188
2189 case Translation::UINT32_REGISTER: {
2190 int input_reg = iterator->Next();
2191 uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg));
2192 bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
2193 if (trace_scope_ != NULL) {
2194 PrintF(trace_scope_->file(),
2195 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2196 reinterpret_cast<intptr_t>(object_slot),
2197 field_index);
2198 PrintF(trace_scope_->file(),
2199 "%" V8PRIdPTR " ; uint %s (%s)\n", value,
2200 converter.NameOfCPURegister(input_reg),
2201 TraceValueType(is_smi));
2202 }
2203 if (is_smi) {
2204 intptr_t tagged_value =
2205 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2206 AddObjectTaggedValue(tagged_value);
2207 } else {
2208 double double_value = static_cast<double>(static_cast<uint32_t>(value));
2209 AddObjectDoubleValue(double_value);
2210 }
2211 return;
2212 }
2213
2214 case Translation::DOUBLE_REGISTER: {
2215 int input_reg = iterator->Next();
2216 double value = input_->GetDoubleRegister(input_reg);
2217 if (trace_scope_ != NULL) {
2218 PrintF(trace_scope_->file(),
2219 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2220 reinterpret_cast<intptr_t>(object_slot),
2221 field_index);
2222 PrintF(trace_scope_->file(),
2223 "%e ; %s\n", value,
2224 DoubleRegister::AllocationIndexToString(input_reg));
2225 }
2226 AddObjectDoubleValue(value);
2227 return;
2228 }
2229
2230 case Translation::STACK_SLOT: {
2231 int input_slot_index = iterator->Next();
2232 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2233 intptr_t input_value = input_->GetFrameSlot(input_offset);
2234 if (trace_scope_ != NULL) {
2235 PrintF(trace_scope_->file(),
2236 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2237 reinterpret_cast<intptr_t>(object_slot),
2238 field_index);
2239 PrintF(trace_scope_->file(),
2240 "0x%08" V8PRIxPTR " ; [sp + %d] ", input_value, input_offset);
2241 reinterpret_cast<Object*>(input_value)->ShortPrint(
2242 trace_scope_->file());
2243 PrintF(trace_scope_->file(),
2244 "\n");
2245 }
2246 AddObjectTaggedValue(input_value);
2247 return;
2248 }
2249
2250 case Translation::INT32_STACK_SLOT: {
2251 int input_slot_index = iterator->Next();
2252 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2253 intptr_t value = input_->GetFrameSlot(input_offset);
2254 bool is_smi = Smi::IsValid(value);
2255 if (trace_scope_ != NULL) {
2256 PrintF(trace_scope_->file(),
2257 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2258 reinterpret_cast<intptr_t>(object_slot),
2259 field_index);
2260 PrintF(trace_scope_->file(),
2261 "%" V8PRIdPTR " ; [sp + %d] (%s)\n",
2262 value, input_offset, TraceValueType(is_smi));
2263 }
2264 if (is_smi) {
2265 intptr_t tagged_value =
2266 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2267 AddObjectTaggedValue(tagged_value);
2268 } else {
2269 double double_value = static_cast<double>(static_cast<int32_t>(value));
2270 AddObjectDoubleValue(double_value);
2271 }
2272 return;
2273 }
2274
2275 case Translation::UINT32_STACK_SLOT: {
2276 int input_slot_index = iterator->Next();
2277 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2278 uintptr_t value =
2279 static_cast<uintptr_t>(input_->GetFrameSlot(input_offset));
2280 bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
2281 if (trace_scope_ != NULL) {
2282 PrintF(trace_scope_->file(),
2283 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2284 reinterpret_cast<intptr_t>(object_slot),
2285 field_index);
2286 PrintF(trace_scope_->file(),
2287 "%" V8PRIdPTR " ; [sp + %d] (uint %s)\n",
2288 value, input_offset, TraceValueType(is_smi));
2289 }
2290 if (is_smi) {
2291 intptr_t tagged_value =
2292 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2293 AddObjectTaggedValue(tagged_value);
2294 } else {
2295 double double_value = static_cast<double>(static_cast<uint32_t>(value));
2296 AddObjectDoubleValue(double_value);
2297 }
2298 return;
2299 }
2300
2301 case Translation::DOUBLE_STACK_SLOT: {
2302 int input_slot_index = iterator->Next();
2303 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2304 double value = input_->GetDoubleFrameSlot(input_offset);
2305 if (trace_scope_ != NULL) {
2306 PrintF(trace_scope_->file(),
2307 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2308 reinterpret_cast<intptr_t>(object_slot),
2309 field_index);
2310 PrintF(trace_scope_->file(),
2311 "%e ; [sp + %d]\n", value, input_offset);
2312 }
2313 AddObjectDoubleValue(value);
2314 return;
2315 }
2316
2317 case Translation::LITERAL: {
2318 Object* literal = ComputeLiteral(iterator->Next());
2319 if (trace_scope_ != NULL) {
2320 PrintF(trace_scope_->file(),
2321 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2322 reinterpret_cast<intptr_t>(object_slot),
2323 field_index);
2324 literal->ShortPrint(trace_scope_->file());
2325 PrintF(trace_scope_->file(),
2326 " ; literal\n");
2327 }
2328 intptr_t value = reinterpret_cast<intptr_t>(literal);
2329 AddObjectTaggedValue(value);
2330 return;
2331 }
2332
2333 case Translation::DUPLICATED_OBJECT: {
2334 int object_index = iterator->Next();
2335 if (trace_scope_ != NULL) {
2336 PrintF(trace_scope_->file(),
2337 " nested @0x%08" V8PRIxPTR ": [field #%d] <- ",
2338 reinterpret_cast<intptr_t>(object_slot),
2339 field_index);
2340 isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2341 PrintF(trace_scope_->file(),
2342 " ; duplicate of object #%d\n", object_index);
2343 }
2344 // Use the materialization marker value as a sentinel and fill in
2345 // the object after the deoptimized frame is built.
2346 intptr_t value = reinterpret_cast<intptr_t>(
2347 isolate_->heap()->arguments_marker());
2348 AddObjectDuplication(0, object_index);
2349 AddObjectTaggedValue(value);
2350 return;
2351 }
2352
2353 case Translation::ARGUMENTS_OBJECT:
2354 case Translation::CAPTURED_OBJECT: {
2355 int length = iterator->Next();
2356 bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
2357 if (trace_scope_ != NULL) {
2358 PrintF(trace_scope_->file(),
2359 " nested @0x%08" V8PRIxPTR ": [field #%d] <- ",
2360 reinterpret_cast<intptr_t>(object_slot),
2361 field_index);
2362 isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2363 PrintF(trace_scope_->file(),
2364 " ; object (length = %d, is_args = %d)\n", length, is_args);
2365 }
2366 // Use the materialization marker value as a sentinel and fill in
2367 // the object after the deoptimized frame is built.
2368 intptr_t value = reinterpret_cast<intptr_t>(
2369 isolate_->heap()->arguments_marker());
2370 AddObjectStart(0, length, is_args);
2371 AddObjectTaggedValue(value);
2372 // We save the object values on the side and materialize the actual
2373 // object after the deoptimized frame is built.
2374 int object_index = deferred_objects_.length() - 1;
2375 for (int i = 0; i < length; i++) {
2376 DoTranslateObject(iterator, object_index, i);
2377 }
2378 return;
2379 }
2380 }
2381
2382 FATAL("Unexpected translation opcode");
2383 }
2384
2385
2386 void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
2387 int frame_index,
2388 unsigned output_offset) {
2389 disasm::NameConverter converter;
2390 // A GC-safe temporary placeholder that we can put in the output frame.
2391 const intptr_t kPlaceholder = reinterpret_cast<intptr_t>(Smi::FromInt(0));
2392
2393 Translation::Opcode opcode =
2394 static_cast<Translation::Opcode>(iterator->Next());
2395
2396 switch (opcode) {
2397 case Translation::BEGIN:
2398 case Translation::JS_FRAME:
2399 case Translation::ARGUMENTS_ADAPTOR_FRAME:
2400 case Translation::CONSTRUCT_STUB_FRAME:
2401 case Translation::GETTER_STUB_FRAME:
2402 case Translation::SETTER_STUB_FRAME:
2403 case Translation::COMPILED_STUB_FRAME:
2404 FATAL("Unexpected translation opcode");
2405 return;
2406
2407 case Translation::REGISTER: {
2408 int input_reg = iterator->Next();
2409 intptr_t input_value = input_->GetRegister(input_reg);
2410 if (trace_scope_ != NULL) {
2411 PrintF(
2412 trace_scope_->file(),
2413 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s ",
2414 output_[frame_index]->GetTop() + output_offset,
2415 output_offset,
2416 input_value,
2417 converter.NameOfCPURegister(input_reg));
2418 reinterpret_cast<Object*>(input_value)->ShortPrint(
2419 trace_scope_->file());
2420 PrintF(trace_scope_->file(), "\n");
2421 }
2422 output_[frame_index]->SetFrameSlot(output_offset, input_value);
2423 return;
2424 }
2425
2426 case Translation::INT32_REGISTER: {
2427 int input_reg = iterator->Next();
2428 intptr_t value = input_->GetRegister(input_reg);
2429 bool is_smi = Smi::IsValid(value);
2430 if (trace_scope_ != NULL) {
2431 PrintF(
2432 trace_scope_->file(),
2433 " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIdPTR " ; %s (%s)\n",
2434 output_[frame_index]->GetTop() + output_offset,
2435 output_offset,
2436 value,
2437 converter.NameOfCPURegister(input_reg),
2438 TraceValueType(is_smi));
2439 }
2440 if (is_smi) {
2441 intptr_t tagged_value =
2442 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2443 output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2444 } else {
2445 // We save the untagged value on the side and store a GC-safe
2446 // temporary placeholder in the frame.
2447 AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2448 static_cast<double>(static_cast<int32_t>(value)));
2449 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2450 }
2451 return;
2452 }
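    // Editor's sketch of the placeholder mechanism used above: an int32
    // register holding 0x40000000 is not Smi-representable on a 32-bit build
    // (Smi::kMaxValue is 2^30 - 1 there), so the frame slot receives
    // Smi::FromInt(0) and AddDoubleValue records the pair
    // (slot address, 1073741824.0); MaterializeHeapObjects() later allocates
    // a HeapNumber with that value and overwrites the placeholder.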
2453
2454 case Translation::UINT32_REGISTER: {
2455 int input_reg = iterator->Next();
2456 uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg));
2457 bool is_smi = value <= static_cast<uintptr_t>(Smi::kMaxValue);
2458 if (trace_scope_ != NULL) {
2459 PrintF(
2460 trace_scope_->file(),
2461 " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIuPTR
2462 " ; uint %s (%s)\n",
2463 output_[frame_index]->GetTop() + output_offset,
2464 output_offset,
2465 value,
2466 converter.NameOfCPURegister(input_reg),
2467 TraceValueType(is_smi));
2468 }
2469 if (is_smi) {
2470 intptr_t tagged_value =
2471 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2472 output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2473 } else {
2474 // We save the untagged value on the side and store a GC-safe
2475 // temporary placeholder in the frame.
2476 AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2477 static_cast<double>(static_cast<uint32_t>(value)));
2478 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2479 }
2480 return;
2481 }
2482
2483 case Translation::DOUBLE_REGISTER: {
2484 int input_reg = iterator->Next();
2485 double value = input_->GetDoubleRegister(input_reg);
2486 if (trace_scope_ != NULL) {
2487 PrintF(trace_scope_->file(),
2488 " 0x%08" V8PRIxPTR ": [top + %d] <- %e ; %s\n",
2489 output_[frame_index]->GetTop() + output_offset,
2490 output_offset,
2491 value,
2492 DoubleRegister::AllocationIndexToString(input_reg));
2493 }
2494 // We save the untagged value on the side and store a GC-safe
2495 // temporary placeholder in the frame.
2496 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value);
2497 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2498 return;
2499 }
2500
2501 case Translation::STACK_SLOT: {
2502 int input_slot_index = iterator->Next();
2503 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2504 intptr_t input_value = input_->GetFrameSlot(input_offset);
2505 if (trace_scope_ != NULL) {
2506 PrintF(trace_scope_->file(),
2507 " 0x%08" V8PRIxPTR ": ",
2508 output_[frame_index]->GetTop() + output_offset);
2509 PrintF(trace_scope_->file(),
2510 "[top + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
2511 output_offset,
2512 input_value,
2513 input_offset);
2514 reinterpret_cast<Object*>(input_value)->ShortPrint(
2515 trace_scope_->file());
2516 PrintF(trace_scope_->file(), "\n");
2517 }
2518 output_[frame_index]->SetFrameSlot(output_offset, input_value);
2519 return;
2520 }
2521
2522 case Translation::INT32_STACK_SLOT: {
2523 int input_slot_index = iterator->Next();
2524 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2525 intptr_t value = input_->GetFrameSlot(input_offset);
2526 bool is_smi = Smi::IsValid(value);
2527 if (trace_scope_ != NULL) {
2528 PrintF(trace_scope_->file(),
2529 " 0x%08" V8PRIxPTR ": ",
2530 output_[frame_index]->GetTop() + output_offset);
2531 PrintF(trace_scope_->file(),
2532 "[top + %d] <- %" V8PRIdPTR " ; [sp + %d] (%s)\n",
2533 output_offset,
2534 value,
2535 input_offset,
2536 TraceValueType(is_smi));
2537 }
2538 if (is_smi) {
2539 intptr_t tagged_value =
2540 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2541 output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2542 } else {
2543 // We save the untagged value on the side and store a GC-safe
2544 // temporary placeholder in the frame.
2545 AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2546 static_cast<double>(static_cast<int32_t>(value)));
2547 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2548 }
2549 return;
2550 }
2551
2552 case Translation::UINT32_STACK_SLOT: {
2553 int input_slot_index = iterator->Next();
2554 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2555 uintptr_t value =
2556 static_cast<uintptr_t>(input_->GetFrameSlot(input_offset));
2557 bool is_smi = value <= static_cast<uintptr_t>(Smi::kMaxValue);
2558 if (trace_scope_ != NULL) {
2559 PrintF(trace_scope_->file(),
2560 " 0x%08" V8PRIxPTR ": ",
2561 output_[frame_index]->GetTop() + output_offset);
2562 PrintF(trace_scope_->file(),
2563 "[top + %d] <- %" V8PRIuPTR " ; [sp + %d] (uint32 %s)\n",
2564 output_offset,
2565 value,
2566 input_offset,
2567 TraceValueType(is_smi));
2568 }
2569 if (is_smi) {
2570 intptr_t tagged_value =
2571 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2572 output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2573 } else {
2574 // We save the untagged value on the side and store a GC-safe
2575 // temporary placeholder in the frame.
2576 AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2577 static_cast<double>(static_cast<uint32_t>(value)));
2578 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2579 }
2580 return;
2581 }
2582
2583 case Translation::DOUBLE_STACK_SLOT: {
2584 int input_slot_index = iterator->Next();
2585 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2586 double value = input_->GetDoubleFrameSlot(input_offset);
2587 if (trace_scope_ != NULL) {
2588 PrintF(trace_scope_->file(),
2589 " 0x%08" V8PRIxPTR ": [top + %d] <- %e ; [sp + %d]\n",
2590 output_[frame_index]->GetTop() + output_offset,
2591 output_offset,
2592 value,
2593 input_offset);
2594 }
2595 // We save the untagged value on the side and store a GC-safe
2596 // temporary placeholder in the frame.
2597 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value);
2598 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2599 return;
2600 }
2601
2602 case Translation::LITERAL: {
2603 Object* literal = ComputeLiteral(iterator->Next());
2604 if (trace_scope_ != NULL) {
2605 PrintF(trace_scope_->file(),
2606 " 0x%08" V8PRIxPTR ": [top + %d] <- ",
2607 output_[frame_index]->GetTop() + output_offset,
2608 output_offset);
2609 literal->ShortPrint(trace_scope_->file());
2610 PrintF(trace_scope_->file(), " ; literal\n");
2611 }
2612 intptr_t value = reinterpret_cast<intptr_t>(literal);
2613 output_[frame_index]->SetFrameSlot(output_offset, value);
2614 return;
2615 }
2616
2617 case Translation::DUPLICATED_OBJECT: {
2618 int object_index = iterator->Next();
2619 if (trace_scope_ != NULL) {
2620 PrintF(trace_scope_->file(),
2621 " 0x%08" V8PRIxPTR ": [top + %d] <- ",
2622 output_[frame_index]->GetTop() + output_offset,
2623 output_offset);
2624 isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2625 PrintF(trace_scope_->file(),
2626 " ; duplicate of object #%d\n", object_index);
2627 }
2628 // Use the materialization marker value as a sentinel and fill in
2629 // the object after the deoptimized frame is built.
2630 intptr_t value = reinterpret_cast<intptr_t>(
2631 isolate_->heap()->arguments_marker());
2632 AddObjectDuplication(output_[frame_index]->GetTop() + output_offset,
2633 object_index);
2634 output_[frame_index]->SetFrameSlot(output_offset, value);
2635 return;
2636 }
2637
2638 case Translation::ARGUMENTS_OBJECT:
2639 case Translation::CAPTURED_OBJECT: {
2640 int length = iterator->Next();
2641 bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
2642 if (trace_scope_ != NULL) {
2643 PrintF(trace_scope_->file(),
2644 " 0x%08" V8PRIxPTR ": [top + %d] <- ",
2645 output_[frame_index]->GetTop() + output_offset,
2646 output_offset);
2647 isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2648 PrintF(trace_scope_->file(),
2649 " ; object (length = %d, is_args = %d)\n", length, is_args);
2650 }
2651 // Use the materialization marker value as a sentinel and fill in
2652 // the object after the deoptimized frame is built.
2653 intptr_t value = reinterpret_cast<intptr_t>(
2654 isolate_->heap()->arguments_marker());
2655 AddObjectStart(output_[frame_index]->GetTop() + output_offset,
2656 length, is_args);
2657 output_[frame_index]->SetFrameSlot(output_offset, value);
2658 // We save the object values on the side and materialize the actual
2659 // object after the deoptimized frame is built.
2660 int object_index = deferred_objects_.length() - 1;
2661 for (int i = 0; i < length; i++) {
2662 DoTranslateObject(iterator, object_index, i);
2663 }
2664 return;
2665 }
2666 }
2667 }
2668
2669
2670 unsigned Deoptimizer::ComputeInputFrameSize() const {
2671 unsigned fixed_size = ComputeFixedSize(function_);
2672 // The fp-to-sp delta already takes the context, constant pool pointer and the
2673 // function into account so we have to avoid double counting them.
2674 unsigned result = fixed_size + fp_to_sp_delta_ -
2675 StandardFrameConstants::kFixedFrameSizeFromFp;
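  // (Editor's note: for optimized code the subtraction works because
  // fp_to_sp_delta_ == kFixedFrameSizeFromFp + stack_slots * kPointerSize +
  // outgoing argument size; the CHECK below re-derives exactly that identity.)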
2676 if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
2677 unsigned stack_slots = compiled_code_->stack_slots();
2678 unsigned outgoing_size = ComputeOutgoingArgumentSize();
2679 CHECK(result == fixed_size + (stack_slots * kPointerSize) + outgoing_size);
2680 }
2681 return result;
2682 }
2683
2684
2685 unsigned Deoptimizer::ComputeFixedSize(JSFunction* function) const {
2686 // The fixed part of the frame consists of the return address, frame
2687 // pointer, function, context, and all the incoming arguments.
2688 return ComputeIncomingArgumentSize(function) +
2689 StandardFrameConstants::kFixedFrameSize;
2690 }
2691
2692
2693 unsigned Deoptimizer::ComputeIncomingArgumentSize(JSFunction* function) const {
2694 // The incoming arguments is the values for formal parameters and
2695 // the receiver. Every slot contains a pointer.
2696 if (function->IsSmi()) {
2697 CHECK_EQ(Smi::cast(function), Smi::FromInt(StackFrame::STUB));
2698 return 0;
2699 }
2700 unsigned arguments = function->shared()->formal_parameter_count() + 1;
2701 return arguments * kPointerSize;
2702 }
2703
2704
2705 unsigned Deoptimizer::ComputeOutgoingArgumentSize() const {
2706 DeoptimizationInputData* data = DeoptimizationInputData::cast(
2707 compiled_code_->deoptimization_data());
2708 unsigned height = data->ArgumentsStackHeight(bailout_id_)->value();
2709 return height * kPointerSize;
2710 }
2711
2712
2713 Object* Deoptimizer::ComputeLiteral(int index) const {
2714 DeoptimizationInputData* data = DeoptimizationInputData::cast(
2715 compiled_code_->deoptimization_data());
2716 FixedArray* literals = data->LiteralArray();
2717 return literals->get(index);
2718 }
2719
2720
2721 void Deoptimizer::AddObjectStart(intptr_t slot, int length, bool is_args) {
2722 ObjectMaterializationDescriptor object_desc(
2723 reinterpret_cast<Address>(slot), jsframe_count_, length, -1, is_args);
2724 deferred_objects_.Add(object_desc);
2725 }
2726
2727
2728 void Deoptimizer::AddObjectDuplication(intptr_t slot, int object_index) {
2729 ObjectMaterializationDescriptor object_desc(
2730 reinterpret_cast<Address>(slot), jsframe_count_, -1, object_index, false);
2731 deferred_objects_.Add(object_desc);
2732 }
2733
2734
2735 void Deoptimizer::AddObjectTaggedValue(intptr_t value) {
2736 deferred_objects_tagged_values_.Add(reinterpret_cast<Object*>(value));
2737 }
2738
2739
2740 void Deoptimizer::AddObjectDoubleValue(double value) {
2741 deferred_objects_tagged_values_.Add(isolate()->heap()->the_hole_value());
2742 HeapNumberMaterializationDescriptor<int> value_desc(
2743 deferred_objects_tagged_values_.length() - 1, value);
2744 deferred_objects_double_values_.Add(value_desc);
2745 }
2746
2747
2748 void Deoptimizer::AddDoubleValue(intptr_t slot_address, double value) {
2749 HeapNumberMaterializationDescriptor<Address> value_desc(
2750 reinterpret_cast<Address>(slot_address), value);
2751 deferred_heap_numbers_.Add(value_desc);
2752 }
2753
2754
2755 void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
2756 BailoutType type,
2757 int max_entry_id) {
2758 // We cannot run this if the serializer is enabled because this will
2759 // cause us to emit relocation information for the external
2760 // references. This is fine because the deoptimizer's code section
2761 // isn't meant to be serialized at all.
2762 CHECK(type == EAGER || type == SOFT || type == LAZY);
2763 DeoptimizerData* data = isolate->deoptimizer_data();
2764 int entry_count = data->deopt_entry_code_entries_[type];
2765 if (max_entry_id < entry_count) return;
2766 entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
2767 while (max_entry_id >= entry_count) entry_count *= 2;
2768 CHECK(entry_count <= Deoptimizer::kMaxNumberOfEntries);
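  // Growth example (editor's sketch; kMinNumberOfEntries is taken as 16 here
  // purely for illustration): with 16 existing entries and max_entry_id == 70
  // the loop doubles 16 -> 32 -> 64 -> 128, so the table is regenerated once
  // with 128 entries rather than once per newly seen bailout id.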
2769
2770 MacroAssembler masm(isolate, NULL, 16 * KB);
2771 masm.set_emit_debug_code(false);
2772 GenerateDeoptimizationEntries(&masm, entry_count, type);
2773 CodeDesc desc;
2774 masm.GetCode(&desc);
2775 ASSERT(!RelocInfo::RequiresRelocation(desc));
2776
2777 MemoryChunk* chunk = data->deopt_entry_code_[type];
2778 CHECK(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
2779 desc.instr_size);
2780 chunk->CommitArea(desc.instr_size);
2781 CopyBytes(chunk->area_start(), desc.buffer,
2782 static_cast<size_t>(desc.instr_size));
2783 CPU::FlushICache(chunk->area_start(), desc.instr_size);
2784
2785 data->deopt_entry_code_entries_[type] = entry_count;
2786 }
2787
2788
2789 FrameDescription::FrameDescription(uint32_t frame_size,
2790 JSFunction* function)
2791 : frame_size_(frame_size),
2792 function_(function),
2793 top_(kZapUint32),
2794 pc_(kZapUint32),
2795 fp_(kZapUint32),
2796 context_(kZapUint32),
2797 constant_pool_(kZapUint32) {
2798 // Zap all the registers.
2799 for (int r = 0; r < Register::kNumRegisters; r++) {
2800 // TODO(jbramley): It isn't safe to use kZapUint32 here. If the register
2801 // isn't used before the next safepoint, the GC will try to scan it as a
2802 // tagged value. kZapUint32 looks like a valid tagged pointer, but it isn't.
2803 SetRegister(r, kZapUint32);
2804 }
2805
2806 // Zap all the slots.
2807 for (unsigned o = 0; o < frame_size; o += kPointerSize) {
2808 SetFrameSlot(o, kZapUint32);
2809 }
2810 }
2811
2812
2813 int FrameDescription::ComputeFixedSize() {
2814 return StandardFrameConstants::kFixedFrameSize +
2815 (ComputeParametersCount() + 1) * kPointerSize;
2816 }
2817
2818
2819 unsigned FrameDescription::GetOffsetFromSlotIndex(int slot_index) {
2820 if (slot_index >= 0) {
2821 // Local or spill slots. Skip the fixed part of the frame
2822 // including all arguments.
2823 unsigned base = GetFrameSize() - ComputeFixedSize();
2824 return base - ((slot_index + 1) * kPointerSize);
2825 } else {
2826 // Incoming parameter.
2827 int arg_size = (ComputeParametersCount() + 1) * kPointerSize;
2828 unsigned base = GetFrameSize() - arg_size;
2829 return base - ((slot_index + 1) * kPointerSize);
2830 }
2831 }
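// Worked example (editor's sketch, kPointerSize == 8, 2 declared parameters,
// so arg_size == (2 + 1) * 8 == 24): parameter slot index -1 maps to offset
// GetFrameSize() - 24, index -2 to GetFrameSize() - 16, while spill slot
// index 0 maps to GetFrameSize() - ComputeFixedSize() - 8.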
2832
2833
2834 int FrameDescription::ComputeParametersCount() {
2835 switch (type_) {
2836 case StackFrame::JAVA_SCRIPT:
2837 return function_->shared()->formal_parameter_count();
2838 case StackFrame::ARGUMENTS_ADAPTOR: {
2839 // Last slot contains the number of incoming arguments as a smi.
2840 // Can't use GetExpression(0) because it would cause infinite recursion.
2841 return reinterpret_cast<Smi*>(*GetFrameSlotPointer(0))->value();
2842 }
2843 case StackFrame::STUB:
2844 return -1; // Minus receiver.
2845 default:
2846 FATAL("Unexpected stack frame type");
2847 return 0;
2848 }
2849 }
2850
2851
2852 Object* FrameDescription::GetParameter(int index) {
2853 CHECK_GE(index, 0);
2854 CHECK_LT(index, ComputeParametersCount());
2855 // The slot indexes for incoming arguments are negative.
2856 unsigned offset = GetOffsetFromSlotIndex(index - ComputeParametersCount());
2857 return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
2858 }
2859
2860
2861 unsigned FrameDescription::GetExpressionCount() {
2862 CHECK_EQ(StackFrame::JAVA_SCRIPT, type_);
2863 unsigned size = GetFrameSize() - ComputeFixedSize();
2864 return size / kPointerSize;
2865 }
2866
2867
2868 Object* FrameDescription::GetExpression(int index) {
2869 ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_);
2870 unsigned offset = GetOffsetFromSlotIndex(index);
2871 return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
2872 }
2873
2874
2875 void TranslationBuffer::Add(int32_t value, Zone* zone) {
2876 // Encode the sign bit in the least significant bit.
2877 bool is_negative = (value < 0);
2878 uint32_t bits = ((is_negative ? -value : value) << 1) |
2879 static_cast<int32_t>(is_negative);
2880 // Encode the individual bytes using the least significant bit of
2881 // each byte to indicate whether or not more bytes follow.
2882 do {
2883 uint32_t next = bits >> 7;
2884 contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone);
2885 bits = next;
2886 } while (bits != 0);
2887 }
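// Worked example (editor's sketch): Add(-3) forms bits = (3 << 1) | 1 == 7
// and emits the single byte 0x0E, while Add(1000) forms bits = 2000 and emits
// 0xA1 followed by 0x1E; the low bit of 0xA1 signals that another byte
// follows.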
2888
2889
2890 int32_t TranslationIterator::Next() {
2891 // Run through the bytes until we reach one with a least significant
2892 // bit of zero (marks the end).
2893 uint32_t bits = 0;
2894 for (int i = 0; true; i += 7) {
2895 ASSERT(HasNext());
2896 uint8_t next = buffer_->get(index_++);
2897 bits |= (next >> 1) << i;
2898 if ((next & 1) == 0) break;
2899 }
2900 // The bits encode the sign in the least significant bit.
2901 bool is_negative = (bits & 1) == 1;
2902 int32_t result = bits >> 1;
2903 return is_negative ? -result : result;
2904 }
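// Round-trip check (editor's sketch): reading back 0xA1, 0x1E accumulates
// bits = (0xA1 >> 1) | ((0x1E >> 1) << 7) == 0x50 | 0x780 == 2000; the sign
// bit (2000 & 1) is clear, so Next() returns 2000 >> 1 == 1000.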
2905
2906
2907 Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
2908 int length = contents_.length();
2909 Handle<ByteArray> result = factory->NewByteArray(length, TENURED);
2910 MemCopy(result->GetDataStartAddress(), contents_.ToVector().start(), length);
2911 return result;
2912 }
2913
2914
2915 void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
2916 buffer_->Add(CONSTRUCT_STUB_FRAME, zone());
2917 buffer_->Add(literal_id, zone());
2918 buffer_->Add(height, zone());
2919 }
2920
2921
BeginGetterStubFrame(int literal_id)2922 void Translation::BeginGetterStubFrame(int literal_id) {
2923 buffer_->Add(GETTER_STUB_FRAME, zone());
2924 buffer_->Add(literal_id, zone());
2925 }
2926
2927
BeginSetterStubFrame(int literal_id)2928 void Translation::BeginSetterStubFrame(int literal_id) {
2929 buffer_->Add(SETTER_STUB_FRAME, zone());
2930 buffer_->Add(literal_id, zone());
2931 }
2932
2933
BeginArgumentsAdaptorFrame(int literal_id,unsigned height)2934 void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
2935 buffer_->Add(ARGUMENTS_ADAPTOR_FRAME, zone());
2936 buffer_->Add(literal_id, zone());
2937 buffer_->Add(height, zone());
2938 }
2939
2940
BeginJSFrame(BailoutId node_id,int literal_id,unsigned height)2941 void Translation::BeginJSFrame(BailoutId node_id,
2942 int literal_id,
2943 unsigned height) {
2944 buffer_->Add(JS_FRAME, zone());
2945 buffer_->Add(node_id.ToInt(), zone());
2946 buffer_->Add(literal_id, zone());
2947 buffer_->Add(height, zone());
2948 }
2949
2950
BeginCompiledStubFrame()2951 void Translation::BeginCompiledStubFrame() {
2952 buffer_->Add(COMPILED_STUB_FRAME, zone());
2953 }
2954
2955
BeginArgumentsObject(int args_length)2956 void Translation::BeginArgumentsObject(int args_length) {
2957 buffer_->Add(ARGUMENTS_OBJECT, zone());
2958 buffer_->Add(args_length, zone());
2959 }
2960
2961
BeginCapturedObject(int length)2962 void Translation::BeginCapturedObject(int length) {
2963 buffer_->Add(CAPTURED_OBJECT, zone());
2964 buffer_->Add(length, zone());
2965 }
2966
2967
DuplicateObject(int object_index)2968 void Translation::DuplicateObject(int object_index) {
2969 buffer_->Add(DUPLICATED_OBJECT, zone());
2970 buffer_->Add(object_index, zone());
2971 }
2972
2973
StoreRegister(Register reg)2974 void Translation::StoreRegister(Register reg) {
2975 buffer_->Add(REGISTER, zone());
2976 buffer_->Add(reg.code(), zone());
2977 }
2978
2979
StoreInt32Register(Register reg)2980 void Translation::StoreInt32Register(Register reg) {
2981 buffer_->Add(INT32_REGISTER, zone());
2982 buffer_->Add(reg.code(), zone());
2983 }
2984
2985
StoreUint32Register(Register reg)2986 void Translation::StoreUint32Register(Register reg) {
2987 buffer_->Add(UINT32_REGISTER, zone());
2988 buffer_->Add(reg.code(), zone());
2989 }
2990
2991
StoreDoubleRegister(DoubleRegister reg)2992 void Translation::StoreDoubleRegister(DoubleRegister reg) {
2993 buffer_->Add(DOUBLE_REGISTER, zone());
2994 buffer_->Add(DoubleRegister::ToAllocationIndex(reg), zone());
2995 }
2996
2997
StoreStackSlot(int index)2998 void Translation::StoreStackSlot(int index) {
2999 buffer_->Add(STACK_SLOT, zone());
3000 buffer_->Add(index, zone());
3001 }
3002
3003
StoreInt32StackSlot(int index)3004 void Translation::StoreInt32StackSlot(int index) {
3005 buffer_->Add(INT32_STACK_SLOT, zone());
3006 buffer_->Add(index, zone());
3007 }
3008
3009
StoreUint32StackSlot(int index)3010 void Translation::StoreUint32StackSlot(int index) {
3011 buffer_->Add(UINT32_STACK_SLOT, zone());
3012 buffer_->Add(index, zone());
3013 }
3014
3015
StoreDoubleStackSlot(int index)3016 void Translation::StoreDoubleStackSlot(int index) {
3017 buffer_->Add(DOUBLE_STACK_SLOT, zone());
3018 buffer_->Add(index, zone());
3019 }
3020
3021
StoreLiteral(int literal_id)3022 void Translation::StoreLiteral(int literal_id) {
3023 buffer_->Add(LITERAL, zone());
3024 buffer_->Add(literal_id, zone());
3025 }
3026
3027
StoreArgumentsObject(bool args_known,int args_index,int args_length)3028 void Translation::StoreArgumentsObject(bool args_known,
3029 int args_index,
3030 int args_length) {
3031 buffer_->Add(ARGUMENTS_OBJECT, zone());
3032 buffer_->Add(args_known, zone());
3033 buffer_->Add(args_index, zone());
3034 buffer_->Add(args_length, zone());
3035 }
3036
3037
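// Returns the number of operand values that follow |opcode| in the
// translation stream (see the Begin*/Store* emitters above).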
NumberOfOperandsFor(Opcode opcode)3038 int Translation::NumberOfOperandsFor(Opcode opcode) {
3039 switch (opcode) {
3040 case GETTER_STUB_FRAME:
3041 case SETTER_STUB_FRAME:
3042 case DUPLICATED_OBJECT:
3043 case ARGUMENTS_OBJECT:
3044 case CAPTURED_OBJECT:
3045 case REGISTER:
3046 case INT32_REGISTER:
3047 case UINT32_REGISTER:
3048 case DOUBLE_REGISTER:
3049 case STACK_SLOT:
3050 case INT32_STACK_SLOT:
3051 case UINT32_STACK_SLOT:
3052 case DOUBLE_STACK_SLOT:
3053 case LITERAL:
3054 case COMPILED_STUB_FRAME:
3055 return 1;
3056 case BEGIN:
3057 case ARGUMENTS_ADAPTOR_FRAME:
3058 case CONSTRUCT_STUB_FRAME:
3059 return 2;
3060 case JS_FRAME:
3061 return 3;
3062 }
3063 FATAL("Unexpected translation type");
3064 return -1;
3065 }
3066
3067
3068 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
3069
StringFor(Opcode opcode)3070 const char* Translation::StringFor(Opcode opcode) {
3071 #define TRANSLATION_OPCODE_CASE(item) case item: return #item;
3072 switch (opcode) {
3073 TRANSLATION_OPCODE_LIST(TRANSLATION_OPCODE_CASE)
3074 }
3075 #undef TRANSLATION_OPCODE_CASE
3076 UNREACHABLE();
3077 return "";
3078 }
3079
3080 #endif
3081
3082
3083 // We can't intermix stack decoding and allocations because the
3084 // deoptimization infrastructure is not GC safe.
3085 // Thus we build a temporary structure in malloc'ed space.
ComputeSlotForNextArgument(Translation::Opcode opcode,TranslationIterator * iterator,DeoptimizationInputData * data,JavaScriptFrame * frame)3086 SlotRef SlotRefValueBuilder::ComputeSlotForNextArgument(
3087 Translation::Opcode opcode,
3088 TranslationIterator* iterator,
3089 DeoptimizationInputData* data,
3090 JavaScriptFrame* frame) {
3091 switch (opcode) {
3092 case Translation::BEGIN:
3093 case Translation::JS_FRAME:
3094 case Translation::ARGUMENTS_ADAPTOR_FRAME:
3095 case Translation::CONSTRUCT_STUB_FRAME:
3096 case Translation::GETTER_STUB_FRAME:
3097 case Translation::SETTER_STUB_FRAME:
3098 // Peeled off before getting here.
3099 break;
3100
3101 case Translation::DUPLICATED_OBJECT: {
3102 return SlotRef::NewDuplicateObject(iterator->Next());
3103 }
3104
3105 case Translation::ARGUMENTS_OBJECT:
3106 return SlotRef::NewArgumentsObject(iterator->Next());
3107
3108 case Translation::CAPTURED_OBJECT: {
3109 return SlotRef::NewDeferredObject(iterator->Next());
3110 }
3111
3112 case Translation::REGISTER:
3113 case Translation::INT32_REGISTER:
3114 case Translation::UINT32_REGISTER:
3115 case Translation::DOUBLE_REGISTER:
3116       // We are at a safepoint that corresponds to a call. All registers are
3117       // saved by the caller, so there are no live registers at this
3118       // point. Thus these translation commands should not be used.
3119 break;
3120
3121 case Translation::STACK_SLOT: {
3122 int slot_index = iterator->Next();
3123 Address slot_addr = SlotAddress(frame, slot_index);
3124 return SlotRef(slot_addr, SlotRef::TAGGED);
3125 }
3126
3127 case Translation::INT32_STACK_SLOT: {
3128 int slot_index = iterator->Next();
3129 Address slot_addr = SlotAddress(frame, slot_index);
3130 return SlotRef(slot_addr, SlotRef::INT32);
3131 }
3132
3133 case Translation::UINT32_STACK_SLOT: {
3134 int slot_index = iterator->Next();
3135 Address slot_addr = SlotAddress(frame, slot_index);
3136 return SlotRef(slot_addr, SlotRef::UINT32);
3137 }
3138
3139 case Translation::DOUBLE_STACK_SLOT: {
3140 int slot_index = iterator->Next();
3141 Address slot_addr = SlotAddress(frame, slot_index);
3142 return SlotRef(slot_addr, SlotRef::DOUBLE);
3143 }
3144
3145 case Translation::LITERAL: {
3146 int literal_index = iterator->Next();
3147 return SlotRef(data->GetIsolate(),
3148 data->LiteralArray()->get(literal_index));
3149 }
3150
3151 case Translation::COMPILED_STUB_FRAME:
3152 UNREACHABLE();
3153 break;
3154 }
3155
3156 FATAL("We should never get here - unexpected deopt info.");
3157 return SlotRef();
3158 }
3159
3160
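// Walks the translation for the optimized frame and collects a SlotRef for
// every translated value, recording where the slots of the inlined frame at
// |inlined_jsframe_index| begin (first_slot_index_) and how many argument
// slots it has (args_length_). If that frame contains any captured or
// duplicated objects, the frame's function is deoptimized.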
SlotRefValueBuilder(JavaScriptFrame * frame,int inlined_jsframe_index,int formal_parameter_count)3161 SlotRefValueBuilder::SlotRefValueBuilder(JavaScriptFrame* frame,
3162 int inlined_jsframe_index,
3163 int formal_parameter_count)
3164 : current_slot_(0), args_length_(-1), first_slot_index_(-1) {
3165 DisallowHeapAllocation no_gc;
3166
3167 int deopt_index = Safepoint::kNoDeoptimizationIndex;
3168 DeoptimizationInputData* data =
3169 static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index);
3170 TranslationIterator it(data->TranslationByteArray(),
3171 data->TranslationIndex(deopt_index)->value());
3172 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
3173 CHECK_EQ(opcode, Translation::BEGIN);
3174 it.Next(); // Drop frame count.
3175
3176 stack_frame_id_ = frame->fp();
3177
3178 int jsframe_count = it.Next();
3179 CHECK_GT(jsframe_count, inlined_jsframe_index);
3180 int jsframes_to_skip = inlined_jsframe_index;
3181 int number_of_slots = -1; // Number of slots inside our frame (yet unknown)
3182 bool should_deopt = false;
3183 while (number_of_slots != 0) {
3184 opcode = static_cast<Translation::Opcode>(it.Next());
3185 bool processed = false;
3186 if (opcode == Translation::ARGUMENTS_ADAPTOR_FRAME) {
3187 if (jsframes_to_skip == 0) {
3188 CHECK_EQ(Translation::NumberOfOperandsFor(opcode), 2);
3189
3190 it.Skip(1); // literal id
3191 int height = it.Next();
3192
3193 // Skip the translation command for the receiver.
3194 it.Skip(Translation::NumberOfOperandsFor(
3195 static_cast<Translation::Opcode>(it.Next())));
3196
3197         // We reached the arguments adaptor frame corresponding to the
3198         // inlined function in question. The number of arguments is height - 1.
3199 first_slot_index_ = slot_refs_.length();
3200 args_length_ = height - 1;
3201 number_of_slots = height - 1;
3202 processed = true;
3203 }
3204 } else if (opcode == Translation::JS_FRAME) {
3205 if (jsframes_to_skip == 0) {
3206 // Skip over operands to advance to the next opcode.
3207 it.Skip(Translation::NumberOfOperandsFor(opcode));
3208
3209 // Skip the translation command for the receiver.
3210 it.Skip(Translation::NumberOfOperandsFor(
3211 static_cast<Translation::Opcode>(it.Next())));
3212
3213         // We reached the frame corresponding to the inlined function
3214         // in question. Process the translation commands for the
3215         // arguments. The number of arguments is equal to the
3216         // formal parameter count.
3217 first_slot_index_ = slot_refs_.length();
3218 args_length_ = formal_parameter_count;
3219 number_of_slots = formal_parameter_count;
3220 processed = true;
3221 }
3222 jsframes_to_skip--;
3223 } else if (opcode != Translation::BEGIN &&
3224 opcode != Translation::CONSTRUCT_STUB_FRAME &&
3225 opcode != Translation::GETTER_STUB_FRAME &&
3226 opcode != Translation::SETTER_STUB_FRAME &&
3227 opcode != Translation::COMPILED_STUB_FRAME) {
3228 slot_refs_.Add(ComputeSlotForNextArgument(opcode, &it, data, frame));
3229
3230 if (first_slot_index_ >= 0) {
3231         // We have found the beginning of our frame, so make sure we also
3232         // count the nested slots of captured objects.
3233 number_of_slots--;
3234 SlotRef& slot = slot_refs_.last();
3235 CHECK_NE(slot.Representation(), SlotRef::ARGUMENTS_OBJECT);
3236 number_of_slots += slot.GetChildrenCount();
3237 if (slot.Representation() == SlotRef::DEFERRED_OBJECT ||
3238 slot.Representation() == SlotRef::DUPLICATE_OBJECT) {
3239 should_deopt = true;
3240 }
3241 }
3242
3243 processed = true;
3244 }
3245 if (!processed) {
3246 // Skip over operands to advance to the next opcode.
3247 it.Skip(Translation::NumberOfOperandsFor(opcode));
3248 }
3249 }
3250 if (should_deopt) {
3251 List<JSFunction*> functions(2);
3252 frame->GetFunctions(&functions);
3253 Deoptimizer::DeoptimizeFunction(functions[0]);
3254 }
3255 }
3256
3257
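// Converts the raw slot contents into a heap object, boxing int32, uint32
// and double values in a new number object when they do not fit in a Smi.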
GetValue(Isolate * isolate)3258 Handle<Object> SlotRef::GetValue(Isolate* isolate) {
3259 switch (representation_) {
3260 case TAGGED:
3261 return Handle<Object>(Memory::Object_at(addr_), isolate);
3262
3263 case INT32: {
3264 int value = Memory::int32_at(addr_);
3265 if (Smi::IsValid(value)) {
3266 return Handle<Object>(Smi::FromInt(value), isolate);
3267 } else {
3268 return isolate->factory()->NewNumberFromInt(value);
3269 }
3270 }
3271
3272 case UINT32: {
3273 uint32_t value = Memory::uint32_at(addr_);
3274 if (value <= static_cast<uint32_t>(Smi::kMaxValue)) {
3275 return Handle<Object>(Smi::FromInt(static_cast<int>(value)), isolate);
3276 } else {
3277 return isolate->factory()->NewNumber(static_cast<double>(value));
3278 }
3279 }
3280
3281 case DOUBLE: {
3282 double value = read_double_value(addr_);
3283 return isolate->factory()->NewNumber(value);
3284 }
3285
3286 case LITERAL:
3287 return literal_;
3288
3289 default:
3290 FATAL("We should never get here - unexpected deopt info.");
3291 return Handle<Object>::null();
3292 }
3293 }
3294
3295
Prepare(Isolate * isolate)3296 void SlotRefValueBuilder::Prepare(Isolate* isolate) {
3297 MaterializedObjectStore* materialized_store =
3298 isolate->materialized_object_store();
3299 previously_materialized_objects_ = materialized_store->Get(stack_frame_id_);
3300 prev_materialized_count_ = previously_materialized_objects_.is_null()
3301 ? 0 : previously_materialized_objects_->length();
3302
3303 // Skip any materialized objects of the inlined "parent" frames.
3304 // (Note that we still need to materialize them because they might be
3305 // referred to as duplicated objects.)
3306 while (current_slot_ < first_slot_index_) {
3307 GetNext(isolate, 0);
3308 }
3309 CHECK_EQ(current_slot_, first_slot_index_);
3310 }
3311
3312
GetPreviouslyMaterialized(Isolate * isolate,int length)3313 Handle<Object> SlotRefValueBuilder::GetPreviouslyMaterialized(
3314 Isolate* isolate, int length) {
3315 int object_index = materialized_objects_.length();
3316 Handle<Object> return_value = Handle<Object>(
3317 previously_materialized_objects_->get(object_index), isolate);
3318 materialized_objects_.Add(return_value);
3319
3320   // Now we need to skip all the nested objects (and possibly read them
3321   // from the materialization store, too).
3322 for (int i = 0; i < length; i++) {
3323 SlotRef& slot = slot_refs_[current_slot_];
3324 current_slot_++;
3325
3326     // We need to read all the nested objects, so add them to the
3327     // number of objects we still need to process.
3328 length += slot.GetChildrenCount();
3329
3330 // Put the nested deferred/duplicate objects into our materialization
3331 // array.
3332 if (slot.Representation() == SlotRef::DEFERRED_OBJECT ||
3333 slot.Representation() == SlotRef::DUPLICATE_OBJECT) {
3334 int nested_object_index = materialized_objects_.length();
3335 Handle<Object> nested_object = Handle<Object>(
3336 previously_materialized_objects_->get(nested_object_index),
3337 isolate);
3338 materialized_objects_.Add(nested_object);
3339 }
3340 }
3341
3342 return return_value;
3343 }
3344
3345
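// Reads the next slot and returns it as a heap object, recursively
// materializing the contents of deferred objects and reusing previously
// materialized ones where possible. |lvl| tracks the recursion depth.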
GetNext(Isolate * isolate,int lvl)3346 Handle<Object> SlotRefValueBuilder::GetNext(Isolate* isolate, int lvl) {
3347 SlotRef& slot = slot_refs_[current_slot_];
3348 current_slot_++;
3349 switch (slot.Representation()) {
3350 case SlotRef::TAGGED:
3351 case SlotRef::INT32:
3352 case SlotRef::UINT32:
3353 case SlotRef::DOUBLE:
3354 case SlotRef::LITERAL: {
3355 return slot.GetValue(isolate);
3356 }
3357 case SlotRef::ARGUMENTS_OBJECT: {
3358 // We should never need to materialize an arguments object,
3359 // but we still need to put something into the array
3360 // so that the indexing is consistent.
3361 materialized_objects_.Add(isolate->factory()->undefined_value());
3362 int length = slot.GetChildrenCount();
3363 for (int i = 0; i < length; ++i) {
3364         // We don't need the argument; just ignore it.
3365 GetNext(isolate, lvl + 1);
3366 }
3367 return isolate->factory()->undefined_value();
3368 }
3369 case SlotRef::DEFERRED_OBJECT: {
3370 int length = slot.GetChildrenCount();
3371 CHECK(slot_refs_[current_slot_].Representation() == SlotRef::LITERAL ||
3372 slot_refs_[current_slot_].Representation() == SlotRef::TAGGED);
3373
3374 int object_index = materialized_objects_.length();
3375 if (object_index < prev_materialized_count_) {
3376 return GetPreviouslyMaterialized(isolate, length);
3377 }
3378
3379 Handle<Object> map_object = slot_refs_[current_slot_].GetValue(isolate);
3380 Handle<Map> map = Map::GeneralizeAllFieldRepresentations(
3381 Handle<Map>::cast(map_object));
3382 current_slot_++;
3383 // TODO(jarin) this should be unified with the code in
3384 // Deoptimizer::MaterializeNextHeapObject()
3385 switch (map->instance_type()) {
3386 case HEAP_NUMBER_TYPE: {
3387 // Reuse the HeapNumber value directly as it is already properly
3388 // tagged and skip materializing the HeapNumber explicitly.
3389 Handle<Object> object = GetNext(isolate, lvl + 1);
3390 materialized_objects_.Add(object);
3391 // On 32-bit architectures, there is an extra slot there because
3392 // the escape analysis calculates the number of slots as
3393 // object-size/pointer-size. To account for this, we read out
3394 // any extra slots.
3395 for (int i = 0; i < length - 2; i++) {
3396 GetNext(isolate, lvl + 1);
3397 }
3398 return object;
3399 }
3400 case JS_OBJECT_TYPE: {
3401 Handle<JSObject> object =
3402 isolate->factory()->NewJSObjectFromMap(map, NOT_TENURED, false);
3403 materialized_objects_.Add(object);
3404 Handle<Object> properties = GetNext(isolate, lvl + 1);
3405 Handle<Object> elements = GetNext(isolate, lvl + 1);
3406 object->set_properties(FixedArray::cast(*properties));
3407 object->set_elements(FixedArrayBase::cast(*elements));
3408 for (int i = 0; i < length - 3; ++i) {
3409 Handle<Object> value = GetNext(isolate, lvl + 1);
3410 FieldIndex index = FieldIndex::ForPropertyIndex(object->map(), i);
3411 object->FastPropertyAtPut(index, *value);
3412 }
3413 return object;
3414 }
3415 case JS_ARRAY_TYPE: {
3416 Handle<JSArray> object =
3417 isolate->factory()->NewJSArray(0, map->elements_kind());
3418 materialized_objects_.Add(object);
3419 Handle<Object> properties = GetNext(isolate, lvl + 1);
3420 Handle<Object> elements = GetNext(isolate, lvl + 1);
3421 Handle<Object> length = GetNext(isolate, lvl + 1);
3422 object->set_properties(FixedArray::cast(*properties));
3423 object->set_elements(FixedArrayBase::cast(*elements));
3424 object->set_length(*length);
3425 return object;
3426 }
3427 default:
3428 PrintF(stderr,
3429 "[couldn't handle instance type %d]\n", map->instance_type());
3430 UNREACHABLE();
3431 break;
3432 }
3433 UNREACHABLE();
3434 break;
3435 }
3436
3437 case SlotRef::DUPLICATE_OBJECT: {
3438 int object_index = slot.DuplicateObjectId();
3439 Handle<Object> object = materialized_objects_[object_index];
3440 materialized_objects_.Add(object);
3441 return object;
3442 }
3443 default:
3444 UNREACHABLE();
3445 break;
3446 }
3447
3448 FATAL("We should never get here - unexpected deopt slot kind.");
3449 return Handle<Object>::null();
3450 }
3451
3452
Finish(Isolate * isolate)3453 void SlotRefValueBuilder::Finish(Isolate* isolate) {
3454   // We should have processed all the slots by now.
3455 CHECK_EQ(slot_refs_.length(), current_slot_);
3456
3457 if (materialized_objects_.length() > prev_materialized_count_) {
3458     // We have materialized some new objects, so we have to store them
3459     // to prevent duplicate materialization.
3460 Handle<FixedArray> array = isolate->factory()->NewFixedArray(
3461 materialized_objects_.length());
3462 for (int i = 0; i < materialized_objects_.length(); i++) {
3463 array->set(i, *(materialized_objects_.at(i)));
3464 }
3465 isolate->materialized_object_store()->Set(stack_frame_id_, array);
3466 }
3467 }
3468
3469
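// Returns the fixed array of objects previously materialized for the frame
// identified by |fp|, or a null handle if there is no entry for that frame.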
Get(Address fp)3470 Handle<FixedArray> MaterializedObjectStore::Get(Address fp) {
3471 int index = StackIdToIndex(fp);
3472 if (index == -1) {
3473 return Handle<FixedArray>::null();
3474 }
3475 Handle<FixedArray> array = GetStackEntries();
3476 CHECK_GT(array->length(), index);
3477 return Handle<FixedArray>::cast(Handle<Object>(array->get(index),
3478 isolate()));
3479 }
3480
3481
Set(Address fp,Handle<FixedArray> materialized_objects)3482 void MaterializedObjectStore::Set(Address fp,
3483 Handle<FixedArray> materialized_objects) {
3484 int index = StackIdToIndex(fp);
3485 if (index == -1) {
3486 index = frame_fps_.length();
3487 frame_fps_.Add(fp);
3488 }
3489
3490 Handle<FixedArray> array = EnsureStackEntries(index + 1);
3491 array->set(index, *materialized_objects);
3492 }
3493
3494
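// Drops the entry for |fp| and compacts both the frame pointer list and the
// materialized object array by shifting the remaining entries down.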
Remove(Address fp)3495 void MaterializedObjectStore::Remove(Address fp) {
3496 int index = StackIdToIndex(fp);
3497 CHECK_GE(index, 0);
3498
3499 frame_fps_.Remove(index);
3500 Handle<FixedArray> array = GetStackEntries();
3501 CHECK_LT(index, array->length());
3502 for (int i = index; i < frame_fps_.length(); i++) {
3503 array->set(i, array->get(i + 1));
3504 }
3505 array->set(frame_fps_.length(), isolate()->heap()->undefined_value());
3506 }
3507
3508
StackIdToIndex(Address fp)3509 int MaterializedObjectStore::StackIdToIndex(Address fp) {
3510 for (int i = 0; i < frame_fps_.length(); i++) {
3511 if (frame_fps_[i] == fp) {
3512 return i;
3513 }
3514 }
3515 return -1;
3516 }
3517
3518
GetStackEntries()3519 Handle<FixedArray> MaterializedObjectStore::GetStackEntries() {
3520 return Handle<FixedArray>(isolate()->heap()->materialized_objects());
3521 }
3522
3523
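// Grows the materialized object array so that it holds at least |length|
// entries. The new backing store is at least 10 entries long and at least
// twice the old length; entries beyond the old length are filled with
// undefined up to |length|.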
EnsureStackEntries(int length)3524 Handle<FixedArray> MaterializedObjectStore::EnsureStackEntries(int length) {
3525 Handle<FixedArray> array = GetStackEntries();
3526 if (array->length() >= length) {
3527 return array;
3528 }
3529
3530 int new_length = length > 10 ? length : 10;
3531 if (new_length < 2 * array->length()) {
3532 new_length = 2 * array->length();
3533 }
3534
3535 Handle<FixedArray> new_array =
3536 isolate()->factory()->NewFixedArray(new_length, TENURED);
3537 for (int i = 0; i < array->length(); i++) {
3538 new_array->set(i, array->get(i));
3539 }
3540 for (int i = array->length(); i < length; i++) {
3541 new_array->set(i, isolate()->heap()->undefined_value());
3542 }
3543 isolate()->heap()->public_set_materialized_objects(*new_array);
3544 return new_array;
3545 }
3546
3547
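// Captures the state of a single deoptimized output frame: the function, the
// expression stack, the source position (determined from the unoptimized code
// at the frame's pc), and the incoming parameters. When an arguments adaptor
// frame is present, the parameters are read from that frame instead.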
DeoptimizedFrameInfo(Deoptimizer * deoptimizer,int frame_index,bool has_arguments_adaptor,bool has_construct_stub)3548 DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
3549 int frame_index,
3550 bool has_arguments_adaptor,
3551 bool has_construct_stub) {
3552 FrameDescription* output_frame = deoptimizer->output_[frame_index];
3553 function_ = output_frame->GetFunction();
3554 has_construct_stub_ = has_construct_stub;
3555 expression_count_ = output_frame->GetExpressionCount();
3556 expression_stack_ = new Object*[expression_count_];
3557 // Get the source position using the unoptimized code.
3558 Address pc = reinterpret_cast<Address>(output_frame->GetPc());
3559 Code* code = Code::cast(deoptimizer->isolate()->FindCodeObject(pc));
3560 source_position_ = code->SourcePosition(pc);
3561
3562 for (int i = 0; i < expression_count_; i++) {
3563 SetExpression(i, output_frame->GetExpression(i));
3564 }
3565
3566 if (has_arguments_adaptor) {
3567 output_frame = deoptimizer->output_[frame_index - 1];
3568 CHECK_EQ(output_frame->GetFrameType(), StackFrame::ARGUMENTS_ADAPTOR);
3569 }
3570
3571 parameters_count_ = output_frame->ComputeParametersCount();
3572 parameters_ = new Object*[parameters_count_];
3573 for (int i = 0; i < parameters_count_; i++) {
3574 SetParameter(i, output_frame->GetParameter(i));
3575 }
3576 }
3577
3578
~DeoptimizedFrameInfo()3579 DeoptimizedFrameInfo::~DeoptimizedFrameInfo() {
3580 delete[] expression_stack_;
3581 delete[] parameters_;
3582 }
3583
3584
Iterate(ObjectVisitor * v)3585 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
3586 v->VisitPointer(BitCast<Object**>(&function_));
3587 v->VisitPointers(parameters_, parameters_ + parameters_count_);
3588 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
3589 }
3590
3591 } } // namespace v8::internal
3592