// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_DEOPTIMIZER_H_
#define V8_DEOPTIMIZER_H_

#include "v8.h"

#include "allocation.h"
#include "macro-assembler.h"
#include "zone-inl.h"


namespace v8 {
namespace internal {

class FrameDescription;
class TranslationIterator;
class DeoptimizingCodeListNode;
class DeoptimizedFrameInfo;

class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
 public:
  HeapNumberMaterializationDescriptor(Address slot_address, double val)
      : slot_address_(slot_address), val_(val) { }

  Address slot_address() const { return slot_address_; }
  double value() const { return val_; }

 private:
  Address slot_address_;
  double val_;
};


class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from the given global context.
  virtual void EnterContext(Context* context) = 0;

  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from the given global context.
  virtual void LeaveContext(Context* context) = 0;
};
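
// Illustrative sketch (not part of this header): a visitor passed to
// Deoptimizer::VisitAllOptimizedFunctions() gets EnterContext() once per
// global context, VisitFunction() for each optimized function found there,
// and finally LeaveContext(). The class below is hypothetical.
//
//   class CountingVisitor : public OptimizedFunctionVisitor {
//    public:
//     virtual void EnterContext(Context* context) { count_ = 0; }
//     virtual void VisitFunction(JSFunction* function) { count_++; }
//     virtual void LeaveContext(Context* context) { }
//     int count() const { return count_; }
//    private:
//     int count_;
//   };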


class Deoptimizer;


class DeoptimizerData {
 public:
  DeoptimizerData();
  ~DeoptimizerData();

#ifdef ENABLE_DEBUGGER_SUPPORT
  void Iterate(ObjectVisitor* v);
#endif

 private:
  MemoryChunk* eager_deoptimization_entry_code_;
  MemoryChunk* lazy_deoptimization_entry_code_;
  Deoptimizer* current_;

#ifdef ENABLE_DEBUGGER_SUPPORT
  DeoptimizedFrameInfo* deoptimized_frame_info_;
#endif

  // List of deoptimized code objects which still have references from active
  // stack frames. These code objects are needed by the deoptimizer when
  // deoptimizing a frame for which the code object for the function has been
  // changed from the code present when deoptimizing was done.
  DeoptimizingCodeListNode* deoptimizing_code_list_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};


class Deoptimizer : public Malloced {
 public:
  enum BailoutType {
    EAGER,
    LAZY,
    OSR,
    // This last bailout type is not really a bailout, but used by the
    // debugger to deoptimize stack frames to allow inspection.
    DEBUGGER
  };

  int output_count() const { return output_count_; }

  // Number of created JS frames. Not all created frames are necessarily JS.
  int jsframe_count() const { return jsframe_count_; }

  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta,
                          Isolate* isolate);
  static Deoptimizer* Grab(Isolate* isolate);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // The returned object with information on the optimized frame needs to be
  // freed before another one can be generated.
  static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
                                                        int jsframe_index,
                                                        Isolate* isolate);
  static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                             Isolate* isolate);
#endif
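
  // Illustrative sketch (assumed usage, not part of this header): the debugger
  // pairs the two calls above and keeps at most one inspectable frame alive at
  // a time; the local names below are hypothetical.
  //
  //   DeoptimizedFrameInfo* info =
  //       Deoptimizer::DebuggerInspectableFrame(frame, jsframe_index, isolate);
  //   Object* first_arg = info->GetParameter(0);
  //   Deoptimizer::DeleteDebuggerInspectableFrame(info, isolate);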

  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room a new relocation
  // information object is allocated and comments are added until it
  // is big enough.
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all functions in the heap.
  static void DeoptimizeAll();

  static void DeoptimizeGlobalObject(JSObject* object);

  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctionsForGlobalObject(
      JSObject* object, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  // Patch all stack guard checks in the unoptimized code to
  // unconditionally call replacement_code.
  static void PatchStackCheckCode(Code* unoptimized_code,
                                  Code* check_code,
                                  Code* replacement_code);

  // Patch stack guard check at instruction before pc_after in
  // the unoptimized code to unconditionally call replacement_code.
  static void PatchStackCheckCodeAt(Code* unoptimized_code,
                                    Address pc_after,
                                    Code* check_code,
                                    Code* replacement_code);

  // Change all patched stack guard checks in the unoptimized code
  // back to a normal stack guard check.
  static void RevertStackCheckCode(Code* unoptimized_code,
                                   Code* check_code,
                                   Code* replacement_code);

  // Change the patched stack guard check at the instruction before pc_after
  // in the unoptimized code back to a normal stack guard check.
  static void RevertStackCheckCodeAt(Code* unoptimized_code,
                                     Address pc_after,
                                     Code* check_code,
                                     Code* replacement_code);

  ~Deoptimizer();

  void MaterializeHeapNumbers();
#ifdef ENABLE_DEBUGGER_SUPPORT
  void MaterializeHeapNumbersForDebuggerInspectableFrame(
      Address parameters_top,
      uint32_t parameters_size,
      Address expressions_top,
      uint32_t expressions_size,
      DeoptimizedFrameInfo* info);
#endif

  static void ComputeOutputFrames(Deoptimizer* deoptimizer);

  static Address GetDeoptimizationEntry(int id, BailoutType type);
  static int GetDeoptimizationId(Address addr, BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           unsigned node_id,
                           SharedFunctionInfo* shared);

  // Code generation support.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }

    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };
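
  // Illustrative sketch (assumed usage, not part of this header): entry code
  // is generated by running a table entry generator over a MacroAssembler;
  // the variable names and the MacroAssembler setup below are hypothetical.
  //
  //   MacroAssembler masm(isolate, NULL, 16 * KB);
  //   TableEntryGenerator generator(&masm, EAGER, kNumberOfEntries);
  //   generator.Generate();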

  int ConvertJSFrameIndexToFrameIndex(int jsframe_index);

 private:
  static const int kNumberOfEntries = 16384;

  Deoptimizer(Isolate* isolate,
              JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta,
              Code* optimized_code);
  void DeleteFrameDescriptions();

  void DoComputeOutputFrames();
  void DoComputeOsrOutputFrame();
  void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
  void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                      int frame_index);
  void DoComputeConstructStubFrame(TranslationIterator* iterator,
                                   int frame_index);
  void DoTranslateCommand(TranslationIterator* iterator,
                          int frame_index,
                          unsigned output_offset);
  // Translate a command for OSR.  Updates the input offset to be used for
  // the next command.  Returns false if translation of the command failed
  // (e.g., a number conversion failed) and may or may not have updated the
  // input offset.
  bool DoOsrTranslateCommand(TranslationIterator* iterator,
                             int* input_offset);

  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  Object* ComputeLiteral(int index) const;

  void AddDoubleValue(intptr_t slot_address, double value);

  static MemoryChunk* CreateCode(BailoutType type);
  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Weak handle callback for deoptimizing code objects.
  static void HandleWeakDeoptimizedCode(
      v8::Persistent<v8::Value> obj, void* data);
  static Code* FindDeoptimizingCodeFromAddress(Address addr);
  static void RemoveDeoptimizingCode(Code* code);

  // Fill the input frame from a JavaScript frame. This is used when
  // the debugger needs to inspect an optimized frame. For normal
  // deoptimizations the input frame is filled in generated code.
  void FillInputFrame(Address tos, JavaScriptFrame* frame);

  Isolate* isolate_;
  JSFunction* function_;
  Code* optimized_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;
  int fp_to_sp_delta_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Number of output JS frames.
  int jsframe_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  List<HeapNumberMaterializationDescriptor> deferred_heap_numbers_;

  static const int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizingCodeListNode;
  friend class DeoptimizedFrameInfo;
};


class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    return malloc(size + frame_size - kPointerSize);
  }

  void operator delete(void* pointer, uint32_t frame_size) {
    free(pointer);
  }

  void operator delete(void* description) {
    free(description);
  }
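
  // Illustrative sketch (assumed usage, not part of this header): the extra
  // placement argument makes operator new over-allocate so the trailing
  // frame_content_ array can hold the whole frame; the variable names below
  // are hypothetical.
  //
  //   uint32_t frame_size = ComputeSomeFrameSizeInBytes();  // hypothetical
  //   FrameDescription* desc =
  //       new(frame_size) FrameDescription(frame_size, function);
  //   desc->SetFrameSlot(0, some_value);
  //   delete desc;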

  uint32_t GetFrameSize() const {
    ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
    return static_cast<uint32_t>(frame_size_);
  }

  JSFunction* GetFunction() const { return function_; }

  unsigned GetOffsetFromSlotIndex(int slot_index);

  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  double GetDoubleFrameSlot(unsigned offset) {
    intptr_t* ptr = GetFrameSlotPointer(offset);
#if V8_TARGET_ARCH_MIPS
    // Prevent gcc from using load-double (mips ldc1) on (possibly)
    // non-64-bit aligned double. Uses two lwc1 instructions.
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = *reinterpret_cast<uint32_t*>(ptr);
    c.u[1] = *(reinterpret_cast<uint32_t*>(ptr) + 1);
    return c.d;
#else
    return *reinterpret_cast<double*>(ptr);
#endif
  }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  intptr_t GetRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(registers_));
    return registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;
  }

  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  intptr_t GetContext() const { return context_; }
  void SetContext(intptr_t context) { context_ = context; }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  StackFrame::Type GetFrameType() const { return type_; }
  void SetFrameType(StackFrame::Type type) { type_ = type; }

  // Get the incoming arguments count.
  int ComputeParametersCount();

  // Get a parameter value for an unoptimized frame.
  Object* GetParameter(int index);

  // Get the expression stack height for an unoptimized frame.
  unsigned GetExpressionCount();

  // Get the expression stack value for an unoptimized frame.
  Object* GetExpression(int index);

  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return OFFSET_OF(FrameDescription, frame_content_);
  }

 private:
  static const uint32_t kZapUint32 = 0xbeeddead;

  // Frame_size_ must hold a uint32_t value.  It is only a uintptr_t to
  // keep the variable-size array frame_content_ of type intptr_t at
  // the end of the structure aligned.
  uintptr_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  intptr_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kNumAllocatableRegisters];
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  intptr_t context_;
  StackFrame::Type type_;
  Smi* state_;
#ifdef DEBUG
  Code::Kind kind_;
#endif

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // This must be at the end of the object as the object is allocated larger
  // than its definition indicates to extend this array.
  intptr_t frame_content_[1];

  intptr_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }

  int ComputeFixedSize();
};


class TranslationBuffer BASE_EMBEDDED {
 public:
  TranslationBuffer() : contents_(256) { }

  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value);

  Handle<ByteArray> CreateByteArray();

 private:
  ZoneList<uint8_t> contents_;
};


class TranslationIterator BASE_EMBEDDED {
 public:
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    ASSERT(index >= 0 && index < buffer->length());
  }

  int32_t Next();

  bool HasNext() const { return index_ < buffer_->length(); }

  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;
};
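
// Illustrative sketch (assumed usage, not part of this header): decoding walks
// the byte array with a TranslationIterator, reading an opcode and then its
// operands; the variable names below are hypothetical.
//
//   TranslationIterator it(translation_byte_array, translation_index);
//   while (it.HasNext()) {
//     Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
//     it.Skip(Translation::NumberOfOperandsFor(opcode));
//   }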


class Translation BASE_EMBEDDED {
 public:
  enum Opcode {
    BEGIN,
    JS_FRAME,
    CONSTRUCT_STUB_FRAME,
    ARGUMENTS_ADAPTOR_FRAME,
    REGISTER,
    INT32_REGISTER,
    DOUBLE_REGISTER,
    STACK_SLOT,
    INT32_STACK_SLOT,
    DOUBLE_STACK_SLOT,
    LITERAL,
    ARGUMENTS_OBJECT,

    // A prefix indicating that the next command is a duplicate of the one
    // that follows it.
    DUPLICATE
  };

  Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()) {
    buffer_->Add(BEGIN);
    buffer_->Add(frame_count);
    buffer_->Add(jsframe_count);
  }

  int index() const { return index_; }

  // Commands.
  void BeginJSFrame(int node_id, int literal_id, unsigned height);
  void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
  void BeginConstructStubFrame(int literal_id, unsigned height);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject();
  void MarkDuplicate();

  static int NumberOfOperandsFor(Opcode opcode);

#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
  static const char* StringFor(Opcode opcode);
#endif

 private:
  TranslationBuffer* buffer_;
  int index_;
};
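
// Illustrative sketch (assumed usage, not part of this header): a translation
// is recorded as a BEGIN command followed by one Begin*Frame command per frame
// and one Store* command per slot in that frame; the variable names below are
// hypothetical.
//
//   TranslationBuffer buffer;
//   Translation translation(&buffer, 1, 1);  // one frame, which is a JS frame
//   translation.BeginJSFrame(node_id, literal_id, height);
//   translation.StoreRegister(some_register);   // value held in a register
//   translation.StoreStackSlot(spill_index);    // value held in a stack slot
//   Handle<ByteArray> encoded = buffer.CreateByteArray();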


// Linked list holding deoptimizing code objects. The deoptimizing code objects
// are kept as weak handles until they are no longer activated on the stack.
class DeoptimizingCodeListNode : public Malloced {
 public:
  explicit DeoptimizingCodeListNode(Code* code);
  ~DeoptimizingCodeListNode();

  DeoptimizingCodeListNode* next() const { return next_; }
  void set_next(DeoptimizingCodeListNode* next) { next_ = next; }
  Handle<Code> code() const { return code_; }

 private:
  // Global (weak) handle to the deoptimizing code object.
  Handle<Code> code_;

  // Next pointer for linked list.
  DeoptimizingCodeListNode* next_;
};


class SlotRef BASE_EMBEDDED {
 public:
  enum SlotRepresentation {
    UNKNOWN,
    TAGGED,
    INT32,
    DOUBLE,
    LITERAL
  };

  SlotRef()
      : addr_(NULL), representation_(UNKNOWN) { }

  SlotRef(Address addr, SlotRepresentation representation)
      : addr_(addr), representation_(representation) { }

  explicit SlotRef(Object* literal)
      : literal_(literal), representation_(LITERAL) { }

  Handle<Object> GetValue() {
    switch (representation_) {
      case TAGGED:
        return Handle<Object>(Memory::Object_at(addr_));

      case INT32: {
        int value = Memory::int32_at(addr_);
        if (Smi::IsValid(value)) {
          return Handle<Object>(Smi::FromInt(value));
        } else {
          return Isolate::Current()->factory()->NewNumberFromInt(value);
        }
      }

      case DOUBLE: {
        double value = Memory::double_at(addr_);
        return Isolate::Current()->factory()->NewNumber(value);
      }

      case LITERAL:
        return literal_;

      default:
        UNREACHABLE();
        return Handle<Object>::null();
    }
  }

  static Vector<SlotRef> ComputeSlotMappingForArguments(
      JavaScriptFrame* frame,
      int inlined_frame_index,
      int formal_parameter_count);

 private:
  Address addr_;
  Handle<Object> literal_;
  SlotRepresentation representation_;

  static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
    if (slot_index >= 0) {
      const int offset = JavaScriptFrameConstants::kLocal0Offset;
      return frame->fp() + offset - (slot_index * kPointerSize);
    } else {
      const int offset = JavaScriptFrameConstants::kLastParameterOffset;
      return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
    }
  }

  static SlotRef ComputeSlotForNextArgument(TranslationIterator* iterator,
                                            DeoptimizationInputData* data,
                                            JavaScriptFrame* frame);

  static void ComputeSlotsForArguments(
      Vector<SlotRef>* args_slots,
      TranslationIterator* iterator,
      DeoptimizationInputData* data,
      JavaScriptFrame* frame);
};
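
// Illustrative sketch (assumed usage, not part of this header): callers map
// the arguments of an inlined frame to SlotRefs and then materialize each
// value; the variable names below are hypothetical.
//
//   Vector<SlotRef> slots = SlotRef::ComputeSlotMappingForArguments(
//       frame, inlined_frame_index, formal_parameter_count);
//   for (int i = 0; i < slots.length(); i++) {
//     Handle<Object> value = slots[i].GetValue();
//     // ... use value ...
//   }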


#ifdef ENABLE_DEBUGGER_SUPPORT
// Class used to represent an unoptimized frame when the debugger
// needs to inspect a frame that is part of an optimized frame. The
// internally used FrameDescription objects are not GC safe, so for use
// by the debugger the frame information is copied to an object of this type.
// Represents parameters in unadapted form, so their number might not match
// the formal parameter count.
class DeoptimizedFrameInfo : public Malloced {
 public:
  DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
                       int frame_index,
                       bool has_arguments_adaptor,
                       bool has_construct_stub);
  virtual ~DeoptimizedFrameInfo();

  // GC support.
  void Iterate(ObjectVisitor* v);

  // Return the number of incoming arguments.
  int parameters_count() { return parameters_count_; }

  // Return the height of the expression stack.
  int expression_count() { return expression_count_; }

  // Get the frame function.
  JSFunction* GetFunction() {
    return function_;
  }

  // Check if this frame is preceded by a construct stub frame.  The
  // bottom-most inlined frame might still be called by an uninlined
  // construct stub.
  bool HasConstructStub() {
    return has_construct_stub_;
  }

  // Get an incoming argument.
  Object* GetParameter(int index) {
    ASSERT(0 <= index && index < parameters_count());
    return parameters_[index];
  }

  // Get an expression from the expression stack.
  Object* GetExpression(int index) {
    ASSERT(0 <= index && index < expression_count());
    return expression_stack_[index];
  }

  int GetSourcePosition() {
    return source_position_;
  }

 private:
  // Set an incoming argument.
  void SetParameter(int index, Object* obj) {
    ASSERT(0 <= index && index < parameters_count());
    parameters_[index] = obj;
  }

  // Set an expression on the expression stack.
  void SetExpression(int index, Object* obj) {
    ASSERT(0 <= index && index < expression_count());
    expression_stack_[index] = obj;
  }

  JSFunction* function_;
  bool has_construct_stub_;
  int parameters_count_;
  int expression_count_;
  Object** parameters_;
  Object** expression_stack_;
  int source_position_;

  friend class Deoptimizer;
};
#endif

} }  // namespace v8::internal

#endif  // V8_DEOPTIMIZER_H_