1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_DEOPTIMIZER_H_
6 #define V8_DEOPTIMIZER_H_
7
8 #include "src/v8.h"
9
10 #include "src/allocation.h"
11 #include "src/macro-assembler.h"
12 #include "src/zone-inl.h"
13
14
15 namespace v8 {
16 namespace internal {
17
18
// Reads a double from |p|, which may not be 8-byte aligned. On hosts that
// tolerate unaligned loads this is a plain read; otherwise the value is
// assembled from two 32-bit reads so the compiler cannot emit an aligned
// 64-bit load instruction.
static inline double read_double_value(Address p) {
#ifdef V8_HOST_CAN_READ_UNALIGNED
  return Memory::double_at(p);
#else  // V8_HOST_CAN_READ_UNALIGNED
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned address.
  union conversion {
    double d;
    uint32_t u[2];
  } c;
  // NOTE(review): assumes the two 32-bit halves are stored in host memory
  // order at p and p + 4 (true for the little-endian targets this targets).
  c.u[0] = *reinterpret_cast<uint32_t*>(p);
  c.u[1] = *reinterpret_cast<uint32_t*>(p + 4);
  return c.d;
#endif  // V8_HOST_CAN_READ_UNALIGNED
}
34
35
36 class FrameDescription;
37 class TranslationIterator;
38 class DeoptimizedFrameInfo;
39
// Records a double that must be boxed into a HeapNumber during
// deoptimization. T identifies where the result goes (instantiated below
// with an int index and with an Address slot).
template<typename T>
class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
 public:
  HeapNumberMaterializationDescriptor(T destination, double value)
      : destination_(destination), value_(value) { }

  T destination() const { return destination_; }
  double value() const { return value_; }

 private:
  T destination_;  // Where the materialized HeapNumber should be stored.
  double value_;   // The unboxed double value.
};
53
54
// Describes an object (possibly an arguments object) that must be
// re-materialized on the heap during deoptimization, and the frame slot
// that should receive it.
class ObjectMaterializationDescriptor BASE_EMBEDDED {
 public:
  ObjectMaterializationDescriptor(
      Address slot_address, int frame, int length, int duplicate, bool is_args)
      : slot_address_(slot_address),
        jsframe_index_(frame),
        object_length_(length),
        duplicate_object_(duplicate),
        is_arguments_(is_args) { }

  Address slot_address() const { return slot_address_; }
  int jsframe_index() const { return jsframe_index_; }
  int object_length() const { return object_length_; }
  int duplicate_object() const { return duplicate_object_; }
  bool is_arguments() const { return is_arguments_; }

  // Only used for allocated receivers in DoComputeConstructStubFrame.
  void patch_slot_address(intptr_t slot) {
    slot_address_ = reinterpret_cast<Address>(slot);
  }

 private:
  Address slot_address_;  // Frame slot that receives the materialized object.
  int jsframe_index_;     // Index of the JS frame this object belongs to.
  int object_length_;
  int duplicate_object_;  // Index of a previously seen duplicate, if any.
  bool is_arguments_;
};
83
84
// Visitor interface used to enumerate optimized JSFunctions per native
// context (see Deoptimizer::VisitAllOptimizedFunctions).
class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from given native context.
  virtual void EnterContext(Context* context) = 0;

  // Called once for each optimized function found in the context.
  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from given native context.
  virtual void LeaveContext(Context* context) = 0;
};
99
100
// Orchestrates deoptimization: translates an optimized frame back into one
// or more unoptimized frames so execution can resume in unoptimized code.
// Instances are created via New() and handed to the generated deopt entry
// code, which retrieves them with Grab().
class Deoptimizer : public Malloced {
 public:
  enum BailoutType {
    EAGER,
    LAZY,
    SOFT,
    // This last bailout type is not really a bailout, but used by the
    // debugger to deoptimize stack frames to allow inspection.
    DEBUGGER
  };

  // Number of bailout types that get their own table of generated entry
  // code (EAGER, LAZY, SOFT -- see DeoptimizerData::deopt_entry_code_).
  static const int kBailoutTypesWithCodeEntry = SOFT + 1;

  // One entry of the lazy-deopt jump table emitted with optimized code:
  // the label bound at the table slot, the deopt entry to jump to, the
  // bailout type, and whether a frame must be built before jumping.
  struct JumpTableEntry : public ZoneObject {
    inline JumpTableEntry(Address entry,
                          Deoptimizer::BailoutType type,
                          bool frame)
        : label(),
          address(entry),
          bailout_type(type),
          needs_frame(frame) { }
    Label label;
    Address address;
    Deoptimizer::BailoutType bailout_type;
    bool needs_frame;
  };

  static bool TraceEnabledFor(BailoutType deopt_type,
                              StackFrame::Type frame_type);
  static const char* MessageFor(BailoutType type);

  int output_count() const { return output_count_; }

  Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
  Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
  BailoutType bailout_type() const { return bailout_type_; }

  // Number of created JS frames. Not all created frames are necessarily JS.
  int jsframe_count() const { return jsframe_count_; }

  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta,
                          Isolate* isolate);
  static Deoptimizer* Grab(Isolate* isolate);

  // The returned object with information on the optimized frame needs to be
  // freed before another one can be generated.
  static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
                                                        int jsframe_index,
                                                        Isolate* isolate);
  static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                             Isolate* isolate);

  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room a new relocation
  // information object is allocated and comments are added until it
  // is big enough.
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all code in the given isolate.
  static void DeoptimizeAll(Isolate* isolate);

  // Deoptimize code associated with the given global object.
  static void DeoptimizeGlobalObject(JSObject* object);

  // Deoptimizes all optimized code that has been previously marked
  // (via code->set_marked_for_deoptimization) and unlinks all functions that
  // refer to that code.
  static void DeoptimizeMarkedCode(Isolate* isolate);

  // Visit all the known optimized functions in a given isolate.
  static void VisitAllOptimizedFunctions(
      Isolate* isolate, OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  ~Deoptimizer();

  void MaterializeHeapObjects(JavaScriptFrameIterator* it);

  void MaterializeHeapNumbersForDebuggerInspectableFrame(
      Address parameters_top,
      uint32_t parameters_size,
      Address expressions_top,
      uint32_t expressions_size,
      DeoptimizedFrameInfo* info);

  static void ComputeOutputFrames(Deoptimizer* deoptimizer);

  // Whether GetDeoptimizationEntry may generate missing entry code on
  // demand or must only compute the address.
  enum GetEntryMode {
    CALCULATE_ENTRY_ADDRESS,
    ENSURE_ENTRY_CODE
  };

  static Address GetDeoptimizationEntry(
      Isolate* isolate,
      int id,
      BailoutType type,
      GetEntryMode mode = ENSURE_ENTRY_CODE);
  static int GetDeoptimizationId(Isolate* isolate,
                                 Address addr,
                                 BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           BailoutId node_id,
                           SharedFunctionInfo* shared);

  // Code generation support. These offsets are read by generated code, so
  // the corresponding members must not be moved.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int has_alignment_padding_offset() {
    return OFFSET_OF(Deoptimizer, has_alignment_padding_);
  }

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  // Sentinel returned by GetDeoptimizationId for addresses that are not
  // deoptimization entries.
  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }
    Isolate* isolate() const { return masm_->isolate(); }

    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  // Entry generator variant that emits a table of |count| entries in its
  // (platform-specific) prologue.
  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

  int ConvertJSFrameIndexToFrameIndex(int jsframe_index);

  static size_t GetMaxDeoptTableSize();

  static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                               BailoutType type,
                                               int max_entry_id);

  Isolate* isolate() const { return isolate_; }

 private:
  // Bounds for the number of generated deoptimization entries.
  static const int kMinNumberOfEntries = 64;
  static const int kMaxNumberOfEntries = 16384;

  Deoptimizer(Isolate* isolate,
              JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta,
              Code* optimized_code);
  Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
  void PrintFunctionName();
  void DeleteFrameDescriptions();

  // Frame translation: one DoCompute* method per output frame kind.
  void DoComputeOutputFrames();
  void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
  void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                      int frame_index);
  void DoComputeConstructStubFrame(TranslationIterator* iterator,
                                   int frame_index);
  void DoComputeAccessorStubFrame(TranslationIterator* iterator,
                                  int frame_index,
                                  bool is_setter_stub_frame);
  void DoComputeCompiledStubFrame(TranslationIterator* iterator,
                                  int frame_index);

  // Translate object, store the result into an auxiliary array
  // (deferred_objects_tagged_values_).
  void DoTranslateObject(TranslationIterator* iterator,
                         int object_index,
                         int field_index);

  // Translate value, store the result into the given frame slot.
  void DoTranslateCommand(TranslationIterator* iterator,
                          int frame_index,
                          unsigned output_offset);

  // Translate object, do not store the result anywhere (but do update
  // the deferred materialization array).
  void DoTranslateObjectAndSkip(TranslationIterator* iterator);

  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  Object* ComputeLiteral(int index) const;

  // Recording of values whose materialization is deferred until after the
  // output frames have been computed.
  void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
  void AddObjectDuplication(intptr_t slot, int object_index);
  void AddObjectTaggedValue(intptr_t value);
  void AddObjectDoubleValue(double value);
  void AddDoubleValue(intptr_t slot_address, double value);

  // True if the JS frame owning the given deferred object had its arguments
  // adapted. Note that jsframe_functions_/jsframe_has_adapted_arguments_
  // are indexed from the other end, hence the reversed index.
  bool ArgumentsObjectIsAdapted(int object_index) {
    ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
    int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
    return jsframe_has_adapted_arguments_[reverse_jsframe_index];
  }

  // The function of the JS frame owning the given deferred object.
  Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
    ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
    int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
    return jsframe_functions_[reverse_jsframe_index];
  }

  // Helper function for heap object materialization.
  Handle<Object> MaterializeNextHeapObject();
  Handle<Object> MaterializeNextValue();

  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Marks all the code in the given context for deoptimization.
  static void MarkAllCodeForContext(Context* native_context);

  // Visit all the known optimized functions in a given context.
  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  // Deoptimizes all code marked in the given context.
  static void DeoptimizeMarkedCodeForContext(Context* native_context);

  // Patch the given code so that it will deoptimize itself.
  static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);

  // Searches the list of known deoptimizing code for a Code object
  // containing the given address (which is supposedly faster than
  // searching all code objects).
  Code* FindDeoptimizingCode(Address addr);

  // Fill the input from from a JavaScript frame. This is used when
  // the debugger needs to inspect an optimized frame. For normal
  // deoptimizations the input frame is filled in generated code.
  void FillInputFrame(Address tos, JavaScriptFrame* frame);

  // Fill the given output frame's registers to contain the failure handler
  // address and the number of parameters for a stub failure trampoline.
  void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
                                        CodeStubInterfaceDescriptor* desc);

  // Fill the given output frame's double registers with the original values
  // from the input frame's double registers.
  void CopyDoubleRegisters(FrameDescription* output_frame);

  // Determines whether the input frame contains alignment padding by looking
  // at the dynamic alignment state slot inside the frame.
  bool HasAlignmentPadding(JSFunction* function);

  Isolate* isolate_;
  JSFunction* function_;
  Code* compiled_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;            // Address we deoptimized from.
  int fp_to_sp_delta_;
  int has_alignment_padding_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Number of output js frames.
  int jsframe_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  // Deferred values to be materialized.
  List<Object*> deferred_objects_tagged_values_;
  List<HeapNumberMaterializationDescriptor<int> >
      deferred_objects_double_values_;
  List<ObjectMaterializationDescriptor> deferred_objects_;
  List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;

  // Key for lookup of previously materialized objects
  Address stack_fp_;
  Handle<FixedArray> previously_materialized_objects_;
  int prev_materialized_count_;

  // Output frame information. Only used during heap object materialization.
  List<Handle<JSFunction> > jsframe_functions_;
  List<bool> jsframe_has_adapted_arguments_;

  // Materialized objects. Only used during heap object materialization.
  List<Handle<Object> >* materialized_values_;
  List<Handle<Object> >* materialized_objects_;
  int materialization_value_index_;
  int materialization_object_index_;

#ifdef DEBUG
  DisallowHeapAllocation* disallow_heap_allocation_;
#endif  // DEBUG

  CodeTracer::Scope* trace_scope_;

  static const int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizedFrameInfo;
};
441
442
443 class FrameDescription {
444 public:
445 FrameDescription(uint32_t frame_size,
446 JSFunction* function);
447
new(size_t size,uint32_t frame_size)448 void* operator new(size_t size, uint32_t frame_size) {
449 // Subtracts kPointerSize, as the member frame_content_ already supplies
450 // the first element of the area to store the frame.
451 return malloc(size + frame_size - kPointerSize);
452 }
453
delete(void * pointer,uint32_t frame_size)454 void operator delete(void* pointer, uint32_t frame_size) {
455 free(pointer);
456 }
457
delete(void * description)458 void operator delete(void* description) {
459 free(description);
460 }
461
GetFrameSize()462 uint32_t GetFrameSize() const {
463 ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
464 return static_cast<uint32_t>(frame_size_);
465 }
466
GetFunction()467 JSFunction* GetFunction() const { return function_; }
468
469 unsigned GetOffsetFromSlotIndex(int slot_index);
470
GetFrameSlot(unsigned offset)471 intptr_t GetFrameSlot(unsigned offset) {
472 return *GetFrameSlotPointer(offset);
473 }
474
GetDoubleFrameSlot(unsigned offset)475 double GetDoubleFrameSlot(unsigned offset) {
476 intptr_t* ptr = GetFrameSlotPointer(offset);
477 return read_double_value(reinterpret_cast<Address>(ptr));
478 }
479
SetFrameSlot(unsigned offset,intptr_t value)480 void SetFrameSlot(unsigned offset, intptr_t value) {
481 *GetFrameSlotPointer(offset) = value;
482 }
483
484 void SetCallerPc(unsigned offset, intptr_t value);
485
486 void SetCallerFp(unsigned offset, intptr_t value);
487
488 void SetCallerConstantPool(unsigned offset, intptr_t value);
489
GetRegister(unsigned n)490 intptr_t GetRegister(unsigned n) const {
491 #if DEBUG
492 // This convoluted ASSERT is needed to work around a gcc problem that
493 // improperly detects an array bounds overflow in optimized debug builds
494 // when using a plain ASSERT.
495 if (n >= ARRAY_SIZE(registers_)) {
496 ASSERT(false);
497 return 0;
498 }
499 #endif
500 return registers_[n];
501 }
502
GetDoubleRegister(unsigned n)503 double GetDoubleRegister(unsigned n) const {
504 ASSERT(n < ARRAY_SIZE(double_registers_));
505 return double_registers_[n];
506 }
507
SetRegister(unsigned n,intptr_t value)508 void SetRegister(unsigned n, intptr_t value) {
509 ASSERT(n < ARRAY_SIZE(registers_));
510 registers_[n] = value;
511 }
512
SetDoubleRegister(unsigned n,double value)513 void SetDoubleRegister(unsigned n, double value) {
514 ASSERT(n < ARRAY_SIZE(double_registers_));
515 double_registers_[n] = value;
516 }
517
GetTop()518 intptr_t GetTop() const { return top_; }
SetTop(intptr_t top)519 void SetTop(intptr_t top) { top_ = top; }
520
GetPc()521 intptr_t GetPc() const { return pc_; }
SetPc(intptr_t pc)522 void SetPc(intptr_t pc) { pc_ = pc; }
523
GetFp()524 intptr_t GetFp() const { return fp_; }
SetFp(intptr_t fp)525 void SetFp(intptr_t fp) { fp_ = fp; }
526
GetContext()527 intptr_t GetContext() const { return context_; }
SetContext(intptr_t context)528 void SetContext(intptr_t context) { context_ = context; }
529
GetConstantPool()530 intptr_t GetConstantPool() const { return constant_pool_; }
SetConstantPool(intptr_t constant_pool)531 void SetConstantPool(intptr_t constant_pool) {
532 constant_pool_ = constant_pool;
533 }
534
GetState()535 Smi* GetState() const { return state_; }
SetState(Smi * state)536 void SetState(Smi* state) { state_ = state; }
537
SetContinuation(intptr_t pc)538 void SetContinuation(intptr_t pc) { continuation_ = pc; }
539
GetFrameType()540 StackFrame::Type GetFrameType() const { return type_; }
SetFrameType(StackFrame::Type type)541 void SetFrameType(StackFrame::Type type) { type_ = type; }
542
543 // Get the incoming arguments count.
544 int ComputeParametersCount();
545
546 // Get a parameter value for an unoptimized frame.
547 Object* GetParameter(int index);
548
549 // Get the expression stack height for a unoptimized frame.
550 unsigned GetExpressionCount();
551
552 // Get the expression stack value for an unoptimized frame.
553 Object* GetExpression(int index);
554
registers_offset()555 static int registers_offset() {
556 return OFFSET_OF(FrameDescription, registers_);
557 }
558
double_registers_offset()559 static int double_registers_offset() {
560 return OFFSET_OF(FrameDescription, double_registers_);
561 }
562
frame_size_offset()563 static int frame_size_offset() {
564 return OFFSET_OF(FrameDescription, frame_size_);
565 }
566
pc_offset()567 static int pc_offset() {
568 return OFFSET_OF(FrameDescription, pc_);
569 }
570
state_offset()571 static int state_offset() {
572 return OFFSET_OF(FrameDescription, state_);
573 }
574
continuation_offset()575 static int continuation_offset() {
576 return OFFSET_OF(FrameDescription, continuation_);
577 }
578
frame_content_offset()579 static int frame_content_offset() {
580 return OFFSET_OF(FrameDescription, frame_content_);
581 }
582
583 private:
584 static const uint32_t kZapUint32 = 0xbeeddead;
585
586 // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
587 // keep the variable-size array frame_content_ of type intptr_t at
588 // the end of the structure aligned.
589 uintptr_t frame_size_; // Number of bytes.
590 JSFunction* function_;
591 intptr_t registers_[Register::kNumRegisters];
592 double double_registers_[DoubleRegister::kMaxNumRegisters];
593 intptr_t top_;
594 intptr_t pc_;
595 intptr_t fp_;
596 intptr_t context_;
597 intptr_t constant_pool_;
598 StackFrame::Type type_;
599 Smi* state_;
600
601 // Continuation is the PC where the execution continues after
602 // deoptimizing.
603 intptr_t continuation_;
604
605 // This must be at the end of the object as the object is allocated larger
606 // than it's definition indicate to extend this array.
607 intptr_t frame_content_[1];
608
GetFrameSlotPointer(unsigned offset)609 intptr_t* GetFrameSlotPointer(unsigned offset) {
610 ASSERT(offset < frame_size_);
611 return reinterpret_cast<intptr_t*>(
612 reinterpret_cast<Address>(this) + frame_content_offset() + offset);
613 }
614
615 int ComputeFixedSize();
616 };
617
618
// Per-isolate deoptimizer bookkeeping: the generated deoptimization entry
// code (one slot per bailout type with code entries), the Deoptimizer that
// is currently in flight between New() and Grab(), and the live
// debugger-inspectable frame info.
class DeoptimizerData {
 public:
  explicit DeoptimizerData(MemoryAllocator* allocator);
  ~DeoptimizerData();

  // GC support.
  void Iterate(ObjectVisitor* v);

 private:
  MemoryAllocator* allocator_;
  // Number of generated entries, and the chunk holding their code, indexed
  // by bailout type.
  int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
  MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];

  DeoptimizedFrameInfo* deoptimized_frame_info_;

  Deoptimizer* current_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};
639
640
// Zone-allocated accumulator for a translation stream under construction;
// the finished contents are copied into a ByteArray by CreateByteArray().
class TranslationBuffer BASE_EMBEDDED {
 public:
  explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }

  // Index at which the next value will be added; used by Translation as its
  // start index.
  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value, Zone* zone);

  Handle<ByteArray> CreateByteArray(Factory* factory);

 private:
  ZoneList<uint8_t> contents_;
};
653
654
// Sequential reader over a translation ByteArray, starting at the given
// index (as produced by TranslationBuffer).
class TranslationIterator BASE_EMBEDDED {
 public:
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    ASSERT(index >= 0 && index < buffer->length());
  }

  // Decodes and returns the next value, advancing past it.
  int32_t Next();

  bool HasNext() const { return index_ < buffer_->length(); }

  // Skips the next n values.
  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;
};
674
675
// X-macro listing every opcode that can appear in a translation stream.
// Used to generate the Translation::Opcode enum below (and, with
// OBJECT_PRINT/ENABLE_DISASSEMBLER, the opcode names for printing).
#define TRANSLATION_OPCODE_LIST(V) \
  V(BEGIN)                         \
  V(JS_FRAME)                      \
  V(CONSTRUCT_STUB_FRAME)          \
  V(GETTER_STUB_FRAME)             \
  V(SETTER_STUB_FRAME)             \
  V(ARGUMENTS_ADAPTOR_FRAME)       \
  V(COMPILED_STUB_FRAME)           \
  V(DUPLICATED_OBJECT)             \
  V(ARGUMENTS_OBJECT)              \
  V(CAPTURED_OBJECT)               \
  V(REGISTER)                      \
  V(INT32_REGISTER)                \
  V(UINT32_REGISTER)               \
  V(DOUBLE_REGISTER)               \
  V(STACK_SLOT)                    \
  V(INT32_STACK_SLOT)              \
  V(UINT32_STACK_SLOT)             \
  V(DOUBLE_STACK_SLOT)             \
  V(LITERAL)
696
697
// Writer for one translation: the constructor emits the BEGIN opcode and the
// frame counts into the buffer, after which the Begin*/Store* commands append
// one opcode (plus operands) per frame and per slot value.
class Translation BASE_EMBEDDED {
 public:
#define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
  enum Opcode {
    TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
    LAST = LITERAL
  };
#undef DECLARE_TRANSLATION_OPCODE_ENUM

  Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
              Zone* zone)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()),
        zone_(zone) {
    buffer_->Add(BEGIN, zone);
    buffer_->Add(frame_count, zone);
    buffer_->Add(jsframe_count, zone);
  }

  // Start index of this translation within the underlying buffer.
  int index() const { return index_; }

  // Commands.
  void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
  void BeginCompiledStubFrame();
  void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
  void BeginConstructStubFrame(int literal_id, unsigned height);
  void BeginGetterStubFrame(int literal_id);
  void BeginSetterStubFrame(int literal_id);
  void BeginArgumentsObject(int args_length);
  void BeginCapturedObject(int length);
  void DuplicateObject(int object_index);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreUint32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreUint32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject(bool args_known, int args_index, int args_length);

  Zone* zone() const { return zone_; }

  // Number of operand values following the given opcode in the stream.
  static int NumberOfOperandsFor(Opcode opcode);

#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
  static const char* StringFor(Opcode opcode);
#endif

  // A literal id which refers to the JSFunction itself.
  static const int kSelfLiteralId = -239;

 private:
  TranslationBuffer* buffer_;
  int index_;
  Zone* zone_;
};
756
757
// Describes where one value of an optimized frame lives -- a stack slot
// interpreted with a given representation, a literal, or a (possibly
// duplicated) deferred object -- so that it can be materialized for the
// debugger (see SlotRefValueBuilder).
class SlotRef BASE_EMBEDDED {
 public:
  enum SlotRepresentation {
    UNKNOWN,
    TAGGED,
    INT32,
    UINT32,
    DOUBLE,
    LITERAL,
    DEFERRED_OBJECT,   // Object captured by the escape analysis.
                       // The number of nested objects can be obtained
                       // with the DeferredObjectLength() method
                       // (the SlotRefs of the nested objects follow
                       // this SlotRef in the depth-first order.)
    DUPLICATE_OBJECT,  // Duplicated object of a deferred object.
    ARGUMENTS_OBJECT   // Arguments object - only used to keep indexing
                       // in sync, it should not be materialized.
  };

  SlotRef()
      : addr_(NULL), representation_(UNKNOWN) { }

  SlotRef(Address addr, SlotRepresentation representation)
      : addr_(addr), representation_(representation) { }

  SlotRef(Isolate* isolate, Object* literal)
      : literal_(literal, isolate), representation_(LITERAL) { }

  static SlotRef NewArgumentsObject(int length) {
    SlotRef slot;
    slot.representation_ = ARGUMENTS_OBJECT;
    slot.deferred_object_length_ = length;
    return slot;
  }

  static SlotRef NewDeferredObject(int length) {
    SlotRef slot;
    slot.representation_ = DEFERRED_OBJECT;
    slot.deferred_object_length_ = length;
    return slot;
  }

  SlotRepresentation Representation() { return representation_; }

  static SlotRef NewDuplicateObject(int id) {
    SlotRef slot;
    slot.representation_ = DUPLICATE_OBJECT;
    slot.duplicate_object_id_ = id;
    return slot;
  }

  // Number of nested SlotRefs for object-valued slots; 0 for scalar slots.
  int GetChildrenCount() {
    if (representation_ == DEFERRED_OBJECT ||
        representation_ == ARGUMENTS_OBJECT) {
      return deferred_object_length_;
    } else {
      return 0;
    }
  }

  int DuplicateObjectId() { return duplicate_object_id_; }

  Handle<Object> GetValue(Isolate* isolate);

 private:
  Address addr_;            // Slot address, for stack-based representations.
  Handle<Object> literal_;  // The value, when representation_ == LITERAL.
  SlotRepresentation representation_;
  int deferred_object_length_;  // Valid for DEFERRED_/ARGUMENTS_OBJECT only.
  int duplicate_object_id_;     // Valid for DUPLICATE_OBJECT only.
};
829
// Collects the SlotRefs describing one inlined frame of an optimized
// JavaScript frame and materializes their values for debugger inspection
// (Prepare, then repeated GetNext, then Finish).
class SlotRefValueBuilder BASE_EMBEDDED {
 public:
  SlotRefValueBuilder(
      JavaScriptFrame* frame,
      int inlined_frame_index,
      int formal_parameter_count);

  void Prepare(Isolate* isolate);
  Handle<Object> GetNext(Isolate* isolate, int level);
  void Finish(Isolate* isolate);

  int args_length() { return args_length_; }

 private:
  List<Handle<Object> > materialized_objects_;
  Handle<FixedArray> previously_materialized_objects_;
  int prev_materialized_count_;
  Address stack_frame_id_;
  List<SlotRef> slot_refs_;
  int current_slot_;    // Index of the next SlotRef to be consumed.
  int args_length_;
  int first_slot_index_;

  static SlotRef ComputeSlotForNextArgument(
      Translation::Opcode opcode,
      TranslationIterator* iterator,
      DeoptimizationInputData* data,
      JavaScriptFrame* frame);

  Handle<Object> GetPreviouslyMaterialized(Isolate* isolate, int length);

  // Address of a frame slot: non-negative indices address locals (growing
  // down from kLocal0Offset), negative indices address the incoming
  // parameters.
  static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
    if (slot_index >= 0) {
      const int offset = JavaScriptFrameConstants::kLocal0Offset;
      return frame->fp() + offset - (slot_index * kPointerSize);
    } else {
      const int offset = JavaScriptFrameConstants::kLastParameterOffset;
      return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
    }
  }

  Handle<Object> GetDeferredObject(Isolate* isolate);
};
873
// Per-isolate store mapping a stack frame (keyed by its frame pointer) to
// the FixedArray of objects that have been materialized for it.
class MaterializedObjectStore {
 public:
  explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
  }

  Handle<FixedArray> Get(Address fp);
  void Set(Address fp, Handle<FixedArray> materialized_objects);
  void Remove(Address fp);

 private:
  Isolate* isolate() { return isolate_; }
  Handle<FixedArray> GetStackEntries();
  Handle<FixedArray> EnsureStackEntries(int size);

  // Looks up the index of fp within frame_fps_.
  int StackIdToIndex(Address fp);

  Isolate* isolate_;
  List<Address> frame_fps_;  // Frame pointers acting as the store's keys.
};
893
894
895 // Class used to represent an unoptimized frame when the debugger
896 // needs to inspect a frame that is part of an optimized frame. The
897 // internally used FrameDescription objects are not GC safe so for use
898 // by the debugger frame information is copied to an object of this type.
899 // Represents parameters in unadapted form so their number might mismatch
900 // formal parameter count.
// Debugger-safe copy of one unoptimized frame reconstructed from an
// optimized frame: parameters (in unadapted form) and expression stack
// values are copied out of the GC-unsafe FrameDescription.
class DeoptimizedFrameInfo : public Malloced {
 public:
  DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
                       int frame_index,
                       bool has_arguments_adaptor,
                       bool has_construct_stub);
  virtual ~DeoptimizedFrameInfo();

  // GC support.
  void Iterate(ObjectVisitor* v);

  // Return the number of incoming arguments.
  int parameters_count() { return parameters_count_; }

  // Return the height of the expression stack.
  int expression_count() { return expression_count_; }

  // Get the frame function.
  JSFunction* GetFunction() {
    return function_;
  }

  // Check if this frame is preceded by construct stub frame. The bottom-most
  // inlined frame might still be called by an uninlined construct stub.
  bool HasConstructStub() {
    return has_construct_stub_;
  }

  // Get an incoming argument.
  Object* GetParameter(int index) {
    ASSERT(0 <= index && index < parameters_count());
    return parameters_[index];
  }

  // Get an expression from the expression stack.
  Object* GetExpression(int index) {
    ASSERT(0 <= index && index < expression_count());
    return expression_stack_[index];
  }

  // Source position associated with this frame.
  int GetSourcePosition() {
    return source_position_;
  }

 private:
  // Set an incoming argument.
  void SetParameter(int index, Object* obj) {
    ASSERT(0 <= index && index < parameters_count());
    parameters_[index] = obj;
  }

  // Set an expression on the expression stack.
  void SetExpression(int index, Object* obj) {
    ASSERT(0 <= index && index < expression_count());
    expression_stack_[index] = obj;
  }

  JSFunction* function_;
  bool has_construct_stub_;
  int parameters_count_;
  int expression_count_;
  Object** parameters_;        // Copied incoming arguments.
  Object** expression_stack_;  // Copied expression stack values.
  int source_position_;

  friend class Deoptimizer;
};
968
969 } } // namespace v8::internal
970
971 #endif // V8_DEOPTIMIZER_H_
972