// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_DEOPTIMIZER_H_
#define V8_DEOPTIMIZER_H_

#include "src/v8.h"

#include "src/allocation.h"
#include "src/macro-assembler.h"
#include "src/zone-inl.h"


namespace v8 {
namespace internal {


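// Reads a double from an arbitrary (possibly unaligned) address. Going
// through memcpy avoids the undefined behaviour of a type-punning cast and
// of unaligned loads; compilers fold it into a plain load where possible.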
static inline double read_double_value(Address p) {
  double d;
  memcpy(&d, p, sizeof(d));
  return d;
}


class FrameDescription;
class TranslationIterator;
class DeoptimizedFrameInfo;

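// Records a double value that still has to be boxed as a HeapNumber once
// allocation is possible again (no allocation can happen while output frames
// are being built), together with the destination that should receive the
// result: an Address of a frame slot, or an int index used during object
// materialization, depending on T.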
template<typename T>
class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
 public:
  HeapNumberMaterializationDescriptor(T destination, double value)
      : destination_(destination), value_(value) { }

  T destination() const { return destination_; }
  double value() const { return value_; }

 private:
  T destination_;
  double value_;
};


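// Describes an object that has to be re-materialized on deoptimization (for
// example one whose allocation was eliminated by escape analysis, or an
// arguments object): the frame slot that should receive the object, the JS
// frame it belongs to, its length, and whether it duplicates an earlier
// object or is an arguments object.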
class ObjectMaterializationDescriptor BASE_EMBEDDED {
 public:
  ObjectMaterializationDescriptor(
      Address slot_address, int frame, int length, int duplicate, bool is_args)
      : slot_address_(slot_address),
        jsframe_index_(frame),
        object_length_(length),
        duplicate_object_(duplicate),
        is_arguments_(is_args) { }

  Address slot_address() const { return slot_address_; }
  int jsframe_index() const { return jsframe_index_; }
  int object_length() const { return object_length_; }
  int duplicate_object() const { return duplicate_object_; }
  bool is_arguments() const { return is_arguments_; }

  // Only used for allocated receivers in DoComputeConstructStubFrame.
  void patch_slot_address(intptr_t slot) {
    slot_address_ = reinterpret_cast<Address>(slot);
  }

 private:
  Address slot_address_;
  int jsframe_index_;
  int object_length_;
  int duplicate_object_;
  bool is_arguments_;
};


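// Callback interface for Deoptimizer::VisitAllOptimizedFunctions() and
// VisitAllOptimizedFunctionsForContext(). An illustrative visitor (the name
// CountingVisitor is not part of V8) that counts optimized functions per
// native context could look like this:
//
//   class CountingVisitor : public OptimizedFunctionVisitor {
//    public:
//     virtual void EnterContext(Context* context) { count_ = 0; }
//     virtual void VisitFunction(JSFunction* function) { count_++; }
//     virtual void LeaveContext(Context* context) { PrintF("%d\n", count_); }
//    private:
//     int count_;
//   };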
class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from the given native context.
  virtual void EnterContext(Context* context) = 0;

  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from the given native context.
  virtual void LeaveContext(Context* context) = 0;
};


class Deoptimizer : public Malloced {
 public:
  enum BailoutType {
    // Deoptimize right at the point in optimized code where a check failed.
    EAGER,
    // Deoptimize when execution returns to optimized code that has been
    // marked for lazy deoptimization in the meantime.
    LAZY,
    // Deoptimization requested by the optimizing compiler itself, typically
    // because of insufficient type feedback.
    SOFT,
    // This last bailout type is not really a bailout, but used by the
    // debugger to deoptimize stack frames to allow inspection.
    DEBUGGER
  };

  static const int kBailoutTypesWithCodeEntry = SOFT + 1;

  struct Reason {
    Reason(int r, const char* m, const char* d)
        : raw_position(r), mnemonic(m), detail(d) {}

    bool operator==(const Reason& other) const {
      return raw_position == other.raw_position &&
             CStringEquals(mnemonic, other.mnemonic) &&
             CStringEquals(detail, other.detail);
    }

    bool operator!=(const Reason& other) const { return !(*this == other); }

    int raw_position;
    const char* mnemonic;
    const char* detail;
  };

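  // One entry of the jump table that optimized code uses to reach the
  // deoptimizer: a label bound at the entry, the address of the
  // deoptimization entry to jump to, the reason (kept for --trace-deopt
  // output), the bailout type, and whether a frame still has to be built
  // before jumping there. Entries that compare equal via IsEquivalentTo()
  // can be shared.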
  struct JumpTableEntry : public ZoneObject {
    inline JumpTableEntry(Address entry, const Reason& the_reason,
                          Deoptimizer::BailoutType type, bool frame)
        : label(),
          address(entry),
          reason(the_reason),
          bailout_type(type),
          needs_frame(frame) {}

    bool IsEquivalentTo(const JumpTableEntry& other) const {
      return address == other.address && bailout_type == other.bailout_type &&
             needs_frame == other.needs_frame &&
             (!FLAG_trace_deopt || reason == other.reason);
    }

    Label label;
    Address address;
    Reason reason;
    Deoptimizer::BailoutType bailout_type;
    bool needs_frame;
  };

  static bool TraceEnabledFor(BailoutType deopt_type,
                              StackFrame::Type frame_type);
  static const char* MessageFor(BailoutType type);

  int output_count() const { return output_count_; }

  Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
  Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
  BailoutType bailout_type() const { return bailout_type_; }

  // Number of created JS frames. Not all created frames are necessarily JS.
  int jsframe_count() const { return jsframe_count_; }

  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta,
                          Isolate* isolate);
  static Deoptimizer* Grab(Isolate* isolate);

  // The returned object with information on the optimized frame needs to be
  // freed before another one can be generated.
  static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
                                                        int jsframe_index,
                                                        Isolate* isolate);
  static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                             Isolate* isolate);

  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room a new relocation
  // information object is allocated and comments are added until it
  // is big enough.
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all code in the given isolate.
  static void DeoptimizeAll(Isolate* isolate);

  // Deoptimize code associated with the given global object.
  static void DeoptimizeGlobalObject(JSObject* object);

  // Deoptimizes all optimized code that has been previously marked
  // (via code->set_marked_for_deoptimization) and unlinks all functions that
  // refer to that code.
  static void DeoptimizeMarkedCode(Isolate* isolate);

  // Visit all the known optimized functions in a given isolate.
  static void VisitAllOptimizedFunctions(
      Isolate* isolate, OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  ~Deoptimizer();

  void MaterializeHeapObjects(JavaScriptFrameIterator* it);

  void MaterializeHeapNumbersForDebuggerInspectableFrame(
      Address parameters_top,
      uint32_t parameters_size,
      Address expressions_top,
      uint32_t expressions_size,
      DeoptimizedFrameInfo* info);

  static void ComputeOutputFrames(Deoptimizer* deoptimizer);


  enum GetEntryMode {
    CALCULATE_ENTRY_ADDRESS,
    ENSURE_ENTRY_CODE
  };


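  // Returns the address of the deoptimization entry for the given id and
  // bailout type. ENSURE_ENTRY_CODE generates the entry code first if it
  // does not exist yet; CALCULATE_ENTRY_ADDRESS assumes it already does.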
  static Address GetDeoptimizationEntry(
      Isolate* isolate,
      int id,
      BailoutType type,
      GetEntryMode mode = ENSURE_ENTRY_CODE);
  static int GetDeoptimizationId(Isolate* isolate,
                                 Address addr,
                                 BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           BailoutId node_id,
                           SharedFunctionInfo* shared);

  // Code generation support.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int has_alignment_padding_offset() {
    return OFFSET_OF(Deoptimizer, has_alignment_padding_);
  }

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }
    Isolate* isolate() const { return masm_->isolate(); }

    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

  int ConvertJSFrameIndexToFrameIndex(int jsframe_index);

  static size_t GetMaxDeoptTableSize();

  static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                               BailoutType type,
                                               int max_entry_id);

  Isolate* isolate() const { return isolate_; }

 private:
  static const int kMinNumberOfEntries = 64;
  static const int kMaxNumberOfEntries = 16384;

  Deoptimizer(Isolate* isolate,
              JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta,
              Code* optimized_code);
  Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
  void PrintFunctionName();
  void DeleteFrameDescriptions();

  void DoComputeOutputFrames();
  void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
  void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                      int frame_index);
  void DoComputeConstructStubFrame(TranslationIterator* iterator,
                                   int frame_index);
  void DoComputeAccessorStubFrame(TranslationIterator* iterator,
                                  int frame_index,
                                  bool is_setter_stub_frame);
  void DoComputeCompiledStubFrame(TranslationIterator* iterator,
                                  int frame_index);

  // Translate object, store the result into an auxiliary array
  // (deferred_objects_tagged_values_).
  void DoTranslateObject(TranslationIterator* iterator,
                         int object_index,
                         int field_index);

  // Translate value, store the result into the given frame slot.
  void DoTranslateCommand(TranslationIterator* iterator,
                          int frame_index,
                          unsigned output_offset);

  // Translate object, do not store the result anywhere (but do update
  // the deferred materialization array).
  void DoTranslateObjectAndSkip(TranslationIterator* iterator);

  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  Object* ComputeLiteral(int index) const;

  void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
  void AddObjectDuplication(intptr_t slot, int object_index);
  void AddObjectTaggedValue(intptr_t value);
  void AddObjectDoubleValue(double value);
  void AddDoubleValue(intptr_t slot_address, double value);

  bool ArgumentsObjectIsAdapted(int object_index) {
    ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
    int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
    return jsframe_has_adapted_arguments_[reverse_jsframe_index];
  }

  Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
    ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
    int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
    return jsframe_functions_[reverse_jsframe_index];
  }

  // Helper function for heap object materialization.
  Handle<Object> MaterializeNextHeapObject();
  Handle<Object> MaterializeNextValue();

  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Marks all the code in the given context for deoptimization.
  static void MarkAllCodeForContext(Context* native_context);

  // Visit all the known optimized functions in a given context.
  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  // Deoptimizes all code marked in the given context.
  static void DeoptimizeMarkedCodeForContext(Context* native_context);

  // Patch the given code so that it will deoptimize itself.
  static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);

  // Searches the list of known deoptimizing code for a Code object
  // containing the given address (which is supposedly faster than
  // searching all code objects).
  Code* FindDeoptimizingCode(Address addr);

  // Fill the input frame from a JavaScript frame. This is used when
  // the debugger needs to inspect an optimized frame. For normal
  // deoptimizations the input frame is filled in generated code.
  void FillInputFrame(Address tos, JavaScriptFrame* frame);

  // Fill the given output frame's registers to contain the failure handler
  // address and the number of parameters for a stub failure trampoline.
  void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
                                        CodeStubDescriptor* desc);

  // Fill the given output frame's double registers with the original values
  // from the input frame's double registers.
  void CopyDoubleRegisters(FrameDescription* output_frame);

  // Determines whether the input frame contains alignment padding by looking
  // at the dynamic alignment state slot inside the frame.
  bool HasAlignmentPadding(JSFunction* function);

  Isolate* isolate_;
  JSFunction* function_;
  Code* compiled_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;
  int fp_to_sp_delta_;
  int has_alignment_padding_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Number of output js frames.
  int jsframe_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  // Deferred values to be materialized.
  List<Object*> deferred_objects_tagged_values_;
  List<HeapNumberMaterializationDescriptor<int> >
      deferred_objects_double_values_;
  List<ObjectMaterializationDescriptor> deferred_objects_;
  List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;

  // Key for lookup of previously materialized objects.
  Address stack_fp_;
  Handle<FixedArray> previously_materialized_objects_;
  int prev_materialized_count_;

  // Output frame information. Only used during heap object materialization.
  List<Handle<JSFunction> > jsframe_functions_;
  List<bool> jsframe_has_adapted_arguments_;

  // Materialized objects. Only used during heap object materialization.
  List<Handle<Object> >* materialized_values_;
  List<Handle<Object> >* materialized_objects_;
  int materialization_value_index_;
  int materialization_object_index_;

#ifdef DEBUG
  DisallowHeapAllocation* disallow_heap_allocation_;
#endif  // DEBUG

  CodeTracer::Scope* trace_scope_;

  static const int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizedFrameInfo;
};


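// Describes one stack frame handled by the deoptimizer: the optimized input
// frame or one of the unoptimized output frames. It holds register values
// plus a variable-sized array of frame slots (frame_content_) that follows
// the object itself, which is why instances are created with the sized
// operator new below, roughly (illustrative):
//
//   FrameDescription* desc =
//       new(frame_size) FrameDescription(frame_size, function);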
class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    return malloc(size + frame_size - kPointerSize);
  }

  void operator delete(void* pointer, uint32_t frame_size) {
    free(pointer);
  }

  void operator delete(void* description) {
    free(description);
  }

  uint32_t GetFrameSize() const {
    DCHECK(static_cast<uint32_t>(frame_size_) == frame_size_);
    return static_cast<uint32_t>(frame_size_);
  }

  JSFunction* GetFunction() const { return function_; }

  unsigned GetOffsetFromSlotIndex(int slot_index);

  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  double GetDoubleFrameSlot(unsigned offset) {
    intptr_t* ptr = GetFrameSlotPointer(offset);
    return read_double_value(reinterpret_cast<Address>(ptr));
  }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  void SetCallerPc(unsigned offset, intptr_t value);

  void SetCallerFp(unsigned offset, intptr_t value);

  void SetCallerConstantPool(unsigned offset, intptr_t value);

  intptr_t GetRegister(unsigned n) const {
#if DEBUG
    // This convoluted DCHECK is needed to work around a gcc problem that
    // improperly detects an array bounds overflow in optimized debug builds
    // when using a plain DCHECK.
    if (n >= arraysize(registers_)) {
      DCHECK(false);
      return 0;
    }
#endif
    return registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    DCHECK(n < arraysize(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    DCHECK(n < arraysize(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    DCHECK(n < arraysize(double_registers_));
    double_registers_[n] = value;
  }

  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  intptr_t GetContext() const { return context_; }
  void SetContext(intptr_t context) { context_ = context; }

  intptr_t GetConstantPool() const { return constant_pool_; }
  void SetConstantPool(intptr_t constant_pool) {
    constant_pool_ = constant_pool;
  }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  StackFrame::Type GetFrameType() const { return type_; }
  void SetFrameType(StackFrame::Type type) { type_ = type; }

  // Get the incoming arguments count.
  int ComputeParametersCount();

  // Get a parameter value for an unoptimized frame.
  Object* GetParameter(int index);

  // Get the expression stack height for an unoptimized frame.
  unsigned GetExpressionCount();

  // Get the expression stack value for an unoptimized frame.
  Object* GetExpression(int index);

  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return OFFSET_OF(FrameDescription, frame_content_);
  }

 private:
  static const uint32_t kZapUint32 = 0xbeeddead;

  // frame_size_ must hold a uint32_t value. It is only a uintptr_t to
  // keep the variable-size array frame_content_ of type intptr_t at
  // the end of the structure aligned.
  uintptr_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  intptr_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kMaxNumRegisters];
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  intptr_t context_;
  intptr_t constant_pool_;
  StackFrame::Type type_;
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // This must be at the end of the object, as the object is allocated larger
  // than its definition indicates in order to extend this array.
  intptr_t frame_content_[1];

  intptr_t* GetFrameSlotPointer(unsigned offset) {
    DCHECK(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }

  int ComputeFixedSize();
};


class DeoptimizerData {
 public:
  explicit DeoptimizerData(MemoryAllocator* allocator);
  ~DeoptimizerData();

  void Iterate(ObjectVisitor* v);

 private:
  MemoryAllocator* allocator_;
  int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
  MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];

  DeoptimizedFrameInfo* deoptimized_frame_info_;

  Deoptimizer* current_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};


class TranslationBuffer BASE_EMBEDDED {
 public:
  explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }

  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value, Zone* zone);

  Handle<ByteArray> CreateByteArray(Factory* factory);

 private:
  ZoneList<uint8_t> contents_;
};


class TranslationIterator BASE_EMBEDDED {
 public:
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    DCHECK(index >= 0 && index < buffer->length());
  }

  int32_t Next();

  bool HasNext() const { return index_ < buffer_->length(); }

  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;
};


#define TRANSLATION_OPCODE_LIST(V) \
  V(BEGIN)                         \
  V(JS_FRAME)                      \
  V(CONSTRUCT_STUB_FRAME)          \
  V(GETTER_STUB_FRAME)             \
  V(SETTER_STUB_FRAME)             \
  V(ARGUMENTS_ADAPTOR_FRAME)       \
  V(COMPILED_STUB_FRAME)           \
  V(DUPLICATED_OBJECT)             \
  V(ARGUMENTS_OBJECT)              \
  V(CAPTURED_OBJECT)               \
  V(REGISTER)                      \
  V(INT32_REGISTER)                \
  V(UINT32_REGISTER)               \
  V(DOUBLE_REGISTER)               \
  V(STACK_SLOT)                    \
  V(INT32_STACK_SLOT)              \
  V(UINT32_STACK_SLOT)             \
  V(DOUBLE_STACK_SLOT)             \
  V(LITERAL)


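// A Translation records, for one deoptimization point, how to reconstruct
// the unoptimized frame(s) from the optimized frame: which frames to build
// and where the value of each of their slots comes from (a register, a stack
// slot, a literal, or an object to be materialized). The optimizing compiler
// emits it roughly like this (illustrative only; the register and the ids
// are made up):
//
//   Translation translation(&buffer, frame_count, jsframe_count, zone);
//   translation.BeginJSFrame(node_id, literal_id, height);
//   translation.StoreRegister(rax);       // tagged value currently in rax
//   translation.StoreInt32StackSlot(3);   // untagged int32 spilled on stack
//   translation.StoreLiteral(4);          // index into the literal array
//
// The index() of the finished translation is recorded in the code's
// DeoptimizationInputData and replayed later through a TranslationIterator.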
class Translation BASE_EMBEDDED {
 public:
#define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
  enum Opcode {
    TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
    LAST = LITERAL
  };
#undef DECLARE_TRANSLATION_OPCODE_ENUM

  Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
              Zone* zone)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()),
        zone_(zone) {
    buffer_->Add(BEGIN, zone);
    buffer_->Add(frame_count, zone);
    buffer_->Add(jsframe_count, zone);
  }

  int index() const { return index_; }

  // Commands.
  void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
  void BeginCompiledStubFrame();
  void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
  void BeginConstructStubFrame(int literal_id, unsigned height);
  void BeginGetterStubFrame(int literal_id);
  void BeginSetterStubFrame(int literal_id);
  void BeginArgumentsObject(int args_length);
  void BeginCapturedObject(int length);
  void DuplicateObject(int object_index);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreUint32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreUint32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject(bool args_known, int args_index, int args_length);

  Zone* zone() const { return zone_; }

  static int NumberOfOperandsFor(Opcode opcode);

#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
  static const char* StringFor(Opcode opcode);
#endif

  // A literal id which refers to the JSFunction itself.
  static const int kSelfLiteralId = -239;

 private:
  TranslationBuffer* buffer_;
  int index_;
  Zone* zone_;
};


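// A SlotRef describes where the value of one slot of an unoptimized frame
// can be found while the corresponding optimized frame is still on the
// stack: a raw frame address plus a representation, a literal handle, or a
// deferred/duplicated object produced by escape analysis.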
class SlotRef BASE_EMBEDDED {
 public:
  enum SlotRepresentation {
    UNKNOWN,
    TAGGED,
    INT32,
    UINT32,
    DOUBLE,
    LITERAL,
    DEFERRED_OBJECT,   // Object captured by the escape analysis.
                       // The number of nested objects can be obtained
                       // with the DeferredObjectLength() method
                       // (the SlotRefs of the nested objects follow
                       // this SlotRef in the depth-first order.)
    DUPLICATE_OBJECT,  // Duplicated object of a deferred object.
    ARGUMENTS_OBJECT   // Arguments object - only used to keep indexing
                       // in sync, it should not be materialized.
  };

  SlotRef()
      : addr_(NULL), representation_(UNKNOWN) { }

  SlotRef(Address addr, SlotRepresentation representation)
      : addr_(addr), representation_(representation) { }

  SlotRef(Isolate* isolate, Object* literal)
      : literal_(literal, isolate), representation_(LITERAL) { }

  static SlotRef NewArgumentsObject(int length) {
    SlotRef slot;
    slot.representation_ = ARGUMENTS_OBJECT;
    slot.deferred_object_length_ = length;
    return slot;
  }

  static SlotRef NewDeferredObject(int length) {
    SlotRef slot;
    slot.representation_ = DEFERRED_OBJECT;
    slot.deferred_object_length_ = length;
    return slot;
  }

  SlotRepresentation Representation() { return representation_; }

  static SlotRef NewDuplicateObject(int id) {
    SlotRef slot;
    slot.representation_ = DUPLICATE_OBJECT;
    slot.duplicate_object_id_ = id;
    return slot;
  }

  int GetChildrenCount() {
    if (representation_ == DEFERRED_OBJECT ||
        representation_ == ARGUMENTS_OBJECT) {
      return deferred_object_length_;
    } else {
      return 0;
    }
  }

  int DuplicateObjectId() { return duplicate_object_id_; }

  Handle<Object> GetValue(Isolate* isolate);

 private:
  Address addr_;
  Handle<Object> literal_;
  SlotRepresentation representation_;
  int deferred_object_length_;
  int duplicate_object_id_;
};

class SlotRefValueBuilder BASE_EMBEDDED {
 public:
  SlotRefValueBuilder(
      JavaScriptFrame* frame,
      int inlined_frame_index,
      int formal_parameter_count);

  void Prepare(Isolate* isolate);
  Handle<Object> GetNext(Isolate* isolate, int level);
  void Finish(Isolate* isolate);

  int args_length() { return args_length_; }

 private:
  List<Handle<Object> > materialized_objects_;
  Handle<FixedArray> previously_materialized_objects_;
  int prev_materialized_count_;
  Address stack_frame_id_;
  List<SlotRef> slot_refs_;
  int current_slot_;
  int args_length_;
  int first_slot_index_;

  static SlotRef ComputeSlotForNextArgument(
      Translation::Opcode opcode,
      TranslationIterator* iterator,
      DeoptimizationInputData* data,
      JavaScriptFrame* frame);

  Handle<Object> GetPreviouslyMaterialized(Isolate* isolate, int length);

  static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
    if (slot_index >= 0) {
      const int offset = JavaScriptFrameConstants::kLocal0Offset;
      return frame->fp() + offset - (slot_index * kPointerSize);
    } else {
      const int offset = JavaScriptFrameConstants::kLastParameterOffset;
      return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
    }
  }

  Handle<Object> GetDeferredObject(Isolate* isolate);
};

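// Keeps the objects that were materialized for a still-live optimized frame,
// keyed by that frame's frame pointer, so that repeated inspections (and the
// eventual deoptimization) of the same frame see the same object identities.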
class MaterializedObjectStore {
 public:
  explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
  }

  Handle<FixedArray> Get(Address fp);
  void Set(Address fp, Handle<FixedArray> materialized_objects);
  void Remove(Address fp);

 private:
  Isolate* isolate() { return isolate_; }
  Handle<FixedArray> GetStackEntries();
  Handle<FixedArray> EnsureStackEntries(int size);

  int StackIdToIndex(Address fp);

  Isolate* isolate_;
  List<Address> frame_fps_;
};


// Class used to represent an unoptimized frame when the debugger
// needs to inspect a frame that is part of an optimized frame. The
// internally used FrameDescription objects are not GC safe, so for use
// by the debugger the frame information is copied to an object of this type.
// Parameters are represented in unadapted form, so their number might not
// match the formal parameter count.
class DeoptimizedFrameInfo : public Malloced {
 public:
  DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
                       int frame_index,
                       bool has_arguments_adaptor,
                       bool has_construct_stub);
  virtual ~DeoptimizedFrameInfo();

  // GC support.
  void Iterate(ObjectVisitor* v);

  // Return the number of incoming arguments.
  int parameters_count() { return parameters_count_; }

  // Return the height of the expression stack.
  int expression_count() { return expression_count_; }

  // Get the frame function.
  JSFunction* GetFunction() {
    return function_;
  }

  // Get the frame context.
  Object* GetContext() { return context_; }

  // Check if this frame is preceded by a construct stub frame. The
  // bottom-most inlined frame might still be called by an uninlined
  // construct stub.
  bool HasConstructStub() {
    return has_construct_stub_;
  }

  // Get an incoming argument.
  Object* GetParameter(int index) {
    DCHECK(0 <= index && index < parameters_count());
    return parameters_[index];
  }

  // Get an expression from the expression stack.
  Object* GetExpression(int index) {
    DCHECK(0 <= index && index < expression_count());
    return expression_stack_[index];
  }

  int GetSourcePosition() {
    return source_position_;
  }

 private:
  // Set an incoming argument.
  void SetParameter(int index, Object* obj) {
    DCHECK(0 <= index && index < parameters_count());
    parameters_[index] = obj;
  }

  // Set an expression on the expression stack.
  void SetExpression(int index, Object* obj) {
    DCHECK(0 <= index && index < expression_count());
    expression_stack_[index] = obj;
  }

  JSFunction* function_;
  Object* context_;
  bool has_construct_stub_;
  int parameters_count_;
  int expression_count_;
  Object** parameters_;
  Object** expression_stack_;
  int source_position_;

  friend class Deoptimizer;
};

} }  // namespace v8::internal

#endif  // V8_DEOPTIMIZER_H_