// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_DEOPTIMIZER_H_
#define V8_DEOPTIMIZER_H_

#include "src/allocation.h"
#include "src/deoptimize-reason.h"
#include "src/macro-assembler.h"
#include "src/source-position.h"
#include "src/zone/zone-chunk-list.h"

namespace v8 {
namespace internal {

// Forward declarations.
class FrameDescription;
class TranslationIterator;
class DeoptimizedFrameInfo;
class TranslatedState;
class RegisterValues;

// Safety wrapper for a 32-bit floating-point value to make sure we don't lose
// the exact bit pattern during deoptimization when passing this value. Note
// that there is intentionally no way to construct it from a {float} value.
class Float32 {
 public:
  Float32() : bit_pattern_(0) {}
  uint32_t get_bits() const { return bit_pattern_; }
  float get_scalar() const { return bit_cast<float>(bit_pattern_); }
  // The only public way to create a non-zero Float32: from raw bits, so the
  // exact NaN payload (if any) is preserved.
  static Float32 FromBits(uint32_t bits) { return Float32(bits); }

 private:
  explicit Float32(uint32_t bit_pattern) : bit_pattern_(bit_pattern) {}
  uint32_t bit_pattern_;
};

// Safety wrapper for a 64-bit floating-point value to make sure we don't lose
// the exact bit pattern during deoptimization when passing this value. Note
// that there is intentionally no way to construct it from a {double} value.
class Float64 {
 public:
  Float64() : bit_pattern_(0) {}
  uint64_t get_bits() const { return bit_pattern_; }
  double get_scalar() const { return bit_cast<double>(bit_pattern_); }
  // True iff the bit pattern is the special "hole" NaN used to mark holes in
  // double arrays (compared bitwise, since NaN != NaN as scalars).
  bool is_hole_nan() const { return bit_pattern_ == kHoleNanInt64; }
  static Float64 FromBits(uint64_t bits) { return Float64(bits); }

 private:
  explicit Float64(uint64_t bit_pattern) : bit_pattern_(bit_pattern) {}
  uint64_t bit_pattern_;
};

// A single value reconstructed from a deoptimization translation. Owned by a
// TranslatedFrame inside a TranslatedState.
class TranslatedValue {
 public:
  // Allocation-less getter of the value.
  // Returns heap()->arguments_marker() if allocation would be
  // necessary to get the value.
  Object* GetRawValue() const;
  Handle<Object> GetValue();

  bool IsMaterializedObject() const;
  bool IsMaterializableByDebugger() const;

 private:
  friend class TranslatedState;
  friend class TranslatedFrame;

  // Discriminator for the union below.
  enum Kind {
    kInvalid,
    kTagged,
    kInt32,
    kUInt32,
    kBoolBit,
    kFloat,
    kDouble,
    kCapturedObject,    // Object captured by the escape analysis.
                        // The number of nested objects can be obtained
                        // with the DeferredObjectLength() method
                        // (the values of the nested objects follow
                        // this value in the depth-first order.)
    kDuplicatedObject,  // Duplicated object of a deferred object.
    kArgumentsObject    // Arguments object - only used to keep indexing
                        // in sync, it should not be materialized.
  };

  TranslatedValue(TranslatedState* container, Kind kind)
      : kind_(kind), container_(container) {}
  Kind kind() const { return kind_; }
  void Handlify();
  int GetChildrenCount() const;

  // Factory methods; each sets up the matching union member below.
  static TranslatedValue NewArgumentsObject(TranslatedState* container,
                                            int length, int object_index);
  static TranslatedValue NewDeferredObject(TranslatedState* container,
                                           int length, int object_index);
  static TranslatedValue NewDuplicateObject(TranslatedState* container, int id);
  static TranslatedValue NewFloat(TranslatedState* container, Float32 value);
  static TranslatedValue NewDouble(TranslatedState* container, Float64 value);
  static TranslatedValue NewInt32(TranslatedState* container, int32_t value);
  static TranslatedValue NewUInt32(TranslatedState* container, uint32_t value);
  static TranslatedValue NewBool(TranslatedState* container, uint32_t value);
  static TranslatedValue NewTagged(TranslatedState* container, Object* literal);
  static TranslatedValue NewInvalid(TranslatedState* container);

  Isolate* isolate() const;
  void MaterializeSimple();

  Kind kind_;
  TranslatedState* container_;  // This is only needed for materialization of
                                // objects and constructing handles (to get
                                // to the isolate).

  MaybeHandle<Object> value_;  // Before handlification, this is always null,
                               // after materialization it is never null,
                               // in between it is only null if the value needs
                               // to be materialized.

  struct MaterializedObjectInfo {
    int id_;
    int length_;  // Applies only to kArgumentsObject or kCapturedObject kinds.
  };

  // Payload; the active member is determined by kind_.
  union {
    // kind kTagged. After handlification it is always nullptr.
    Object* raw_literal_;
    // kind is kUInt32 or kBoolBit.
    uint32_t uint32_value_;
    // kind is kInt32.
    int32_t int32_value_;
    // kind is kFloat.
    Float32 float_value_;
    // kind is kDouble.
    Float64 double_value_;
    // kind is kDuplicatedObject or kArgumentsObject or kCapturedObject.
    MaterializedObjectInfo materialization_info_;
  };

  // Checked accessors for the union members (each CHECKs the kind).
  Object* raw_literal() const;
  int32_t int32_value() const;
  uint32_t uint32_value() const;
  Float32 float_value() const;
  Float64 double_value() const;
  int object_length() const;
  int object_index() const;
};


// One logical frame reconstructed from a deoptimization translation; holds the
// frame's metadata plus the sequence of its TranslatedValues.
class TranslatedFrame {
 public:
  enum Kind {
    kFunction,
    kInterpretedFunction,
    kGetter,
    kSetter,
    kTailCallerFunction,
    kArgumentsAdaptor,
    kConstructStub,
    kCompiledStub,
    kInvalid
  };

  int GetValueCount();

  Kind kind() const { return kind_; }
  BailoutId node_id() const { return node_id_; }
  Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  int height() const { return height_; }

  SharedFunctionInfo* raw_shared_info() const {
    CHECK_NOT_NULL(raw_shared_info_);
    return raw_shared_info_;
  }

  // Forwarding iterator over the frame's values.
  class iterator {
   public:
    iterator& operator++() {
      AdvanceIterator(&position_);
      return *this;
    }

    iterator operator++(int) {
      iterator original(position_);
      AdvanceIterator(&position_);
      return original;
    }

    bool operator==(const iterator& other) const {
      return position_ == other.position_;
    }
    bool operator!=(const iterator& other) const { return !(*this == other); }

    TranslatedValue& operator*() { return (*position_); }
    TranslatedValue* operator->() { return &(*position_); }

   private:
    friend TranslatedFrame;

    explicit iterator(std::deque<TranslatedValue>::iterator position)
        : position_(position) {}
    std::deque<TranslatedValue>::iterator position_;
  };

  typedef TranslatedValue& reference;
  typedef TranslatedValue const& const_reference;

  iterator begin() { return iterator(values_.begin()); }
  iterator end() { return iterator(values_.end()); }

  reference front() { return values_.front(); }
  const_reference front() const { return values_.front(); }

 private:
  friend class TranslatedState;

  // Constructor static methods.
  static TranslatedFrame JSFrame(BailoutId node_id,
                                 SharedFunctionInfo* shared_info, int height);
  static TranslatedFrame InterpretedFrame(BailoutId bytecode_offset,
                                          SharedFunctionInfo* shared_info,
                                          int height);
  static TranslatedFrame AccessorFrame(Kind kind,
                                       SharedFunctionInfo* shared_info);
  static TranslatedFrame ArgumentsAdaptorFrame(SharedFunctionInfo* shared_info,
                                               int height);
  static TranslatedFrame TailCallerFrame(SharedFunctionInfo* shared_info);
  static TranslatedFrame ConstructStubFrame(BailoutId bailout_id,
                                            SharedFunctionInfo* shared_info,
                                            int height);
  static TranslatedFrame CompiledStubFrame(int height, Isolate* isolate) {
    return TranslatedFrame(kCompiledStub, isolate, nullptr, height);
  }
  static TranslatedFrame InvalidFrame() {
    return TranslatedFrame(kInvalid, nullptr);
  }

  static void AdvanceIterator(std::deque<TranslatedValue>::iterator* iter);

  TranslatedFrame(Kind kind, Isolate* isolate,
                  SharedFunctionInfo* shared_info = nullptr, int height = 0)
      : kind_(kind),
        node_id_(BailoutId::None()),
        raw_shared_info_(shared_info),
        height_(height),
        isolate_(isolate) {}

  void Add(const TranslatedValue& value) { values_.push_back(value); }
  void Handlify();

  Kind kind_;
  BailoutId node_id_;
  SharedFunctionInfo* raw_shared_info_;
  Handle<SharedFunctionInfo> shared_info_;
  int height_;
  Isolate* isolate_;

  typedef std::deque<TranslatedValue> ValuesContainer;

  ValuesContainer values_;
};


// Auxiliary class for translating deoptimization values.
// Typical usage sequence:
//
//  1. Construct the instance. This will involve reading out the translations
//     and resolving them to values using the supplied frame pointer and
//     machine state (registers). This phase is guaranteed not to allocate
//     and not to use any HandleScope. Any object pointers will be stored raw.
//
//  2. Handlify pointers. This will convert all the raw pointers to handles.
//
//  3. Reading out the frame values.
//
// Note: After the instance is constructed, it is possible to iterate over
// the values eagerly.

class TranslatedState {
 public:
  TranslatedState();
  explicit TranslatedState(JavaScriptFrame* frame);

  void Prepare(bool has_adapted_arguments, Address stack_frame_pointer);

  // Store newly materialized values into the isolate.
  void StoreMaterializedValuesAndDeopt(JavaScriptFrame* frame);

  typedef std::vector<TranslatedFrame>::iterator iterator;
  iterator begin() { return frames_.begin(); }
  iterator end() { return frames_.end(); }

  typedef std::vector<TranslatedFrame>::const_iterator const_iterator;
  const_iterator begin() const { return frames_.begin(); }
  const_iterator end() const { return frames_.end(); }

  std::vector<TranslatedFrame>& frames() { return frames_; }

  TranslatedFrame* GetArgumentsInfoFromJSFrameIndex(int jsframe_index,
                                                    int* arguments_count);

  Isolate* isolate() { return isolate_; }

  // Reads the translation stream and fills frames_; this is the
  // allocation-free phase described in the class comment above.
  void Init(Address input_frame_pointer, TranslationIterator* iterator,
            FixedArray* literal_array, RegisterValues* registers,
            FILE* trace_file);

 private:
  friend TranslatedValue;

  TranslatedFrame CreateNextTranslatedFrame(TranslationIterator* iterator,
                                            FixedArray* literal_array,
                                            Address fp,
                                            FILE* trace_file);
  TranslatedValue CreateNextTranslatedValue(int frame_index, int value_index,
                                            TranslationIterator* iterator,
                                            FixedArray* literal_array,
                                            Address fp,
                                            RegisterValues* registers,
                                            FILE* trace_file);

  void UpdateFromPreviouslyMaterializedObjects();
  Handle<Object> MaterializeAt(int frame_index, int* value_index);
  Handle<Object> MaterializeObjectAt(int object_index);
  class CapturedObjectMaterializer;
  Handle<Object> MaterializeCapturedObjectAt(TranslatedValue* slot,
                                             int frame_index, int* value_index);
  bool GetAdaptedArguments(Handle<JSObject>* result, int frame_index);

  // Raw reads of stack slots relative to a frame pointer.
  static uint32_t GetUInt32Slot(Address fp, int slot_index);
  static Float32 GetFloatSlot(Address fp, int slot_index);
  static Float64 GetDoubleSlot(Address fp, int slot_index);

  std::vector<TranslatedFrame> frames_;
  Isolate* isolate_;
  Address stack_frame_pointer_;
  bool has_adapted_arguments_;

  // Position of an object (captured or duplicated) within frames_.
  struct ObjectPosition {
    int frame_index_;
    int value_index_;
  };
  std::deque<ObjectPosition> object_positions_;
};


// Visitor interface used when enumerating optimized functions per native
// context (see Deoptimizer::VisitAllOptimizedFunctions).
class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from given native context.
  virtual void EnterContext(Context* context) = 0;

  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from given native context.
  virtual void LeaveContext(Context* context) = 0;
};

// Orchestrates deoptimization: computes the unoptimized output frames from an
// optimized input frame, and manages the per-isolate deopt entry tables.
class Deoptimizer : public Malloced {
 public:
  enum BailoutType { EAGER, LAZY, SOFT, kLastBailoutType = SOFT };

  enum class BailoutState {
    NO_REGISTERS,
    TOS_REGISTER,
  };

  static const char* BailoutStateToString(BailoutState state) {
    switch (state) {
      case BailoutState::NO_REGISTERS:
        return "NO_REGISTERS";
      case BailoutState::TOS_REGISTER:
        return "TOS_REGISTER";
    }
    UNREACHABLE();
    return nullptr;
  }

  struct DeoptInfo {
    DeoptInfo(SourcePosition position, DeoptimizeReason deopt_reason,
              int deopt_id)
        : position(position), deopt_reason(deopt_reason), deopt_id(deopt_id) {}

    SourcePosition position;
    DeoptimizeReason deopt_reason;
    int deopt_id;

    static const int kNoDeoptId = -1;
  };

  static DeoptInfo GetDeoptInfo(Code* code, byte* from);

  static int ComputeSourcePositionFromBaselineCode(SharedFunctionInfo* shared,
                                                   BailoutId node_id);
  static int ComputeSourcePositionFromBytecodeArray(SharedFunctionInfo* shared,
                                                    BailoutId node_id);

  // Entry in the jump table emitted at the end of optimized code; jumps to the
  // deopt entry for (bailout_type, needs_frame).
  struct JumpTableEntry : public ZoneObject {
    inline JumpTableEntry(Address entry, const DeoptInfo& deopt_info,
                          Deoptimizer::BailoutType type, bool frame)
        : label(),
          address(entry),
          deopt_info(deopt_info),
          bailout_type(type),
          needs_frame(frame) {}

    // Note: deliberately ignores deopt_info; entries that share the target,
    // type and frame requirement can share a jump table slot.
    bool IsEquivalentTo(const JumpTableEntry& other) const {
      return address == other.address && bailout_type == other.bailout_type &&
             needs_frame == other.needs_frame;
    }

    Label label;
    Address address;
    DeoptInfo deopt_info;
    Deoptimizer::BailoutType bailout_type;
    bool needs_frame;
  };

  static bool TraceEnabledFor(StackFrame::Type frame_type);
  static const char* MessageFor(BailoutType type);

  int output_count() const { return output_count_; }

  Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
  Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
  BailoutType bailout_type() const { return bailout_type_; }

  // Number of created JS frames. Not all created frames are necessarily JS.
  int jsframe_count() const { return jsframe_count_; }

  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta,
                          Isolate* isolate);
  static Deoptimizer* Grab(Isolate* isolate);

  // The returned object with information on the optimized frame needs to be
  // freed before another one can be generated.
  static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
                                                        int jsframe_index,
                                                        Isolate* isolate);

  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room a new relocation
  // information object is allocated and comments are added until it
  // is big enough.
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns. If {code} is specified then the given code is targeted
  // instead of the function code (e.g. OSR code not installed on function).
  static void DeoptimizeFunction(JSFunction* function, Code* code = nullptr);

  // Deoptimize all code in the given isolate.
  static void DeoptimizeAll(Isolate* isolate);

  // Deoptimizes all optimized code that has been previously marked
  // (via code->set_marked_for_deoptimization) and unlinks all functions that
  // refer to that code.
  static void DeoptimizeMarkedCode(Isolate* isolate);

  // Visit all the known optimized functions in a given isolate.
  static void VisitAllOptimizedFunctions(
      Isolate* isolate, OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  ~Deoptimizer();

  void MaterializeHeapObjects(JavaScriptFrameIterator* it);

  static void ComputeOutputFrames(Deoptimizer* deoptimizer);

  enum GetEntryMode {
    CALCULATE_ENTRY_ADDRESS,
    ENSURE_ENTRY_CODE
  };

  static Address GetDeoptimizationEntry(
      Isolate* isolate,
      int id,
      BailoutType type,
      GetEntryMode mode = ENSURE_ENTRY_CODE);
  static int GetDeoptimizationId(Isolate* isolate,
                                 Address addr,
                                 BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           BailoutId node_id,
                           SharedFunctionInfo* shared);

  // Code generation support.
  // Offsets of members, used by generated code to address this object
  // directly (OFFSET_OF is used because Deoptimizer is not standard-layout).
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int caller_frame_top_offset() {
    return OFFSET_OF(Deoptimizer, caller_frame_top_);
  }

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class TableEntryGenerator BASE_EMBEDDED {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : masm_(masm), type_(type), count_(count) {}

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }
    Isolate* isolate() const { return masm_->isolate(); }

    void GeneratePrologue();

   private:
    int count() const { return count_; }

    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
    int count_;
  };

  static size_t GetMaxDeoptTableSize();

  static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                               BailoutType type,
                                               int max_entry_id);

  Isolate* isolate() const { return isolate_; }

 private:
  static const int kMinNumberOfEntries = 64;
  static const int kMaxNumberOfEntries = 16384;

  Deoptimizer(Isolate* isolate, JSFunction* function, BailoutType type,
              unsigned bailout_id, Address from, int fp_to_sp_delta);
  Code* FindOptimizedCode(JSFunction* function);
  void PrintFunctionName();
  void DeleteFrameDescriptions();

  // Frame construction; one DoCompute* method per TranslatedFrame kind.
  void DoComputeOutputFrames();
  void DoComputeJSFrame(TranslatedFrame* translated_frame, int frame_index,
                        bool goto_catch_handler);
  void DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
                                 int frame_index, bool goto_catch_handler);
  void DoComputeArgumentsAdaptorFrame(TranslatedFrame* translated_frame,
                                      int frame_index);
  void DoComputeTailCallerFrame(TranslatedFrame* translated_frame,
                                int frame_index);
  void DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
                                   int frame_index);
  void DoComputeAccessorStubFrame(TranslatedFrame* translated_frame,
                                  int frame_index, bool is_setter_stub_frame);
  void DoComputeCompiledStubFrame(TranslatedFrame* translated_frame,
                                  int frame_index);

  void WriteTranslatedValueToOutput(
      TranslatedFrame::iterator* iterator, int* input_index, int frame_index,
      unsigned output_offset, const char* debug_hint_string = nullptr,
      Address output_address_for_materialization = nullptr);
  void WriteValueToOutput(Object* value, int input_index, int frame_index,
                          unsigned output_offset,
                          const char* debug_hint_string);
  void DebugPrintOutputSlot(intptr_t value, int frame_index,
                            unsigned output_offset,
                            const char* debug_hint_string);

  unsigned ComputeInputFrameAboveFpFixedSize() const;
  unsigned ComputeInputFrameSize() const;
  static unsigned ComputeJavascriptFixedSize(SharedFunctionInfo* shared);
  static unsigned ComputeInterpretedFixedSize(SharedFunctionInfo* shared);

  static unsigned ComputeIncomingArgumentSize(SharedFunctionInfo* shared);
  static unsigned ComputeOutgoingArgumentSize(Code* code, unsigned bailout_id);

  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Marks all the code in the given context for deoptimization.
  static void MarkAllCodeForContext(Context* native_context);

  // Visit all the known optimized functions in a given context.
  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  // Deoptimizes all code marked in the given context.
  static void DeoptimizeMarkedCodeForContext(Context* native_context);

  // Patch the given code so that it will deoptimize itself.
  static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);

  // Searches the list of known deoptimizing code for a Code object
  // containing the given address (which is supposedly faster than
  // searching all code objects).
  Code* FindDeoptimizingCode(Address addr);

  // Fill the given output frame's registers to contain the failure handler
  // address and the number of parameters for a stub failure trampoline.
  void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
                                        CodeStubDescriptor* desc);

  // Fill the given output frame's double registers with the original values
  // from the input frame's double registers.
  void CopyDoubleRegisters(FrameDescription* output_frame);

  Isolate* isolate_;
  JSFunction* function_;
  Code* compiled_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;
  int fp_to_sp_delta_;
  bool deoptimizing_throw_;
  int catch_handler_data_;
  int catch_handler_pc_offset_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Number of output js frames.
  int jsframe_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  // Caller frame details computed from input frame.
  intptr_t caller_frame_top_;
  intptr_t caller_fp_;
  intptr_t caller_pc_;
  intptr_t caller_constant_pool_;
  intptr_t input_frame_context_;

  // Key for lookup of previously materialized objects.
  intptr_t stack_fp_;

  TranslatedState translated_state_;
  // A pending write of a materialized value into an output frame slot.
  struct ValueToMaterialize {
    Address output_slot_address_;
    TranslatedFrame::iterator value_;
  };
  std::vector<ValueToMaterialize> values_to_materialize_;

#ifdef DEBUG
  DisallowHeapAllocation* disallow_heap_allocation_;
#endif  // DEBUG

  CodeTracer::Scope* trace_scope_;

  static const int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizedFrameInfo;
};


// Raw machine register state captured at the deopt point. The arrays below
// are written directly by generated code (see the static_asserts).
class RegisterValues {
 public:
  intptr_t GetRegister(unsigned n) const {
#if DEBUG
    // This convoluted DCHECK is needed to work around a gcc problem that
    // improperly detects an array bounds overflow in optimized debug builds
    // when using a plain DCHECK.
    if (n >= arraysize(registers_)) {
      DCHECK(false);
      return 0;
    }
#endif
    return registers_[n];
  }

  Float32 GetFloatRegister(unsigned n) const {
    DCHECK(n < arraysize(float_registers_));
    return float_registers_[n];
  }

  Float64 GetDoubleRegister(unsigned n) const {
    DCHECK(n < arraysize(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    DCHECK(n < arraysize(registers_));
    registers_[n] = value;
  }

  void SetFloatRegister(unsigned n, Float32 value) {
    DCHECK(n < arraysize(float_registers_));
    float_registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, Float64 value) {
    DCHECK(n < arraysize(double_registers_));
    double_registers_[n] = value;
  }

  // Generated code is writing directly into the below arrays, make sure their
  // element sizes fit what the machine instructions expect.
  static_assert(sizeof(Float32) == kFloatSize, "size mismatch");
  static_assert(sizeof(Float64) == kDoubleSize, "size mismatch");

  intptr_t registers_[Register::kNumRegisters];
  Float32 float_registers_[FloatRegister::kMaxNumRegisters];
  Float64 double_registers_[DoubleRegister::kMaxNumRegisters];
};


// Description of one (input or output) frame: register state plus a
// variable-size block of frame contents, allocated in one malloc'ed chunk.
class FrameDescription {
 public:
  explicit FrameDescription(uint32_t frame_size, int parameter_count = 0);

  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    return malloc(size + frame_size - kPointerSize);
  }

  // Placement-delete counterpart, used if the constructor throws.
  void operator delete(void* pointer, uint32_t frame_size) {
    free(pointer);
  }

  void operator delete(void* description) {
    free(description);
  }

  uint32_t GetFrameSize() const {
    DCHECK(static_cast<uint32_t>(frame_size_) == frame_size_);
    return static_cast<uint32_t>(frame_size_);
  }

  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  Address GetFramePointerAddress() {
    int fp_offset = GetFrameSize() - parameter_count() * kPointerSize -
                    StandardFrameConstants::kCallerSPOffset;
    return reinterpret_cast<Address>(GetFrameSlotPointer(fp_offset));
  }

  RegisterValues* GetRegisterValues() { return &register_values_; }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  void SetCallerPc(unsigned offset, intptr_t value);

  void SetCallerFp(unsigned offset, intptr_t value);

  void SetCallerConstantPool(unsigned offset, intptr_t value);

  intptr_t GetRegister(unsigned n) const {
    return register_values_.GetRegister(n);
  }

  Float64 GetDoubleRegister(unsigned n) const {
    return register_values_.GetDoubleRegister(n);
  }

  void SetRegister(unsigned n, intptr_t value) {
    register_values_.SetRegister(n, value);
  }

  void SetDoubleRegister(unsigned n, Float64 value) {
    register_values_.SetDoubleRegister(n, value);
  }

  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  intptr_t GetContext() const { return context_; }
  void SetContext(intptr_t context) { context_ = context; }

  intptr_t GetConstantPool() const { return constant_pool_; }
  void SetConstantPool(intptr_t constant_pool) {
    constant_pool_ = constant_pool;
  }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  StackFrame::Type GetFrameType() const { return type_; }
  void SetFrameType(StackFrame::Type type) { type_ = type; }

  // Argument count, including receiver.
  int parameter_count() { return parameter_count_; }

  // Offsets used by generated code to address members directly.
  static int registers_offset() {
    return OFFSET_OF(FrameDescription, register_values_.registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, register_values_.double_registers_);
  }

  static int frame_size_offset() {
    return offsetof(FrameDescription, frame_size_);
  }

  static int pc_offset() { return offsetof(FrameDescription, pc_); }

  static int state_offset() { return offsetof(FrameDescription, state_); }

  static int continuation_offset() {
    return offsetof(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return offsetof(FrameDescription, frame_content_);
  }

 private:
  static const uint32_t kZapUint32 = 0xbeeddead;

  // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
  // keep the variable-size array frame_content_ of type intptr_t at
  // the end of the structure aligned.
  uintptr_t frame_size_;  // Number of bytes.
  int parameter_count_;
  RegisterValues register_values_;
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  intptr_t context_;
  intptr_t constant_pool_;
  StackFrame::Type type_;
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // This must be at the end of the object as the object is allocated larger
  // than its definition indicates to extend this array.
  intptr_t frame_content_[1];

  intptr_t* GetFrameSlotPointer(unsigned offset) {
    DCHECK(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }
};


// Per-isolate data for the deoptimizer: the generated deopt entry code (one
// chunk per BailoutType) and the currently active Deoptimizer, if any.
class DeoptimizerData {
 public:
  explicit DeoptimizerData(MemoryAllocator* allocator);
  ~DeoptimizerData();

 private:
  MemoryAllocator* allocator_;
  int deopt_entry_code_entries_[Deoptimizer::kLastBailoutType + 1];
  MemoryChunk* deopt_entry_code_[Deoptimizer::kLastBailoutType + 1];

  Deoptimizer* current_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};


// Growable byte buffer the Translation class serializes opcodes/operands
// into; converted to an on-heap ByteArray when code generation finishes.
class TranslationBuffer BASE_EMBEDDED {
 public:
  explicit TranslationBuffer(Zone* zone) : contents_(zone) {}

  int CurrentIndex() const { return static_cast<int>(contents_.size()); }
  void Add(int32_t value);

  Handle<ByteArray> CreateByteArray(Factory* factory);

 private:
  ZoneChunkList<uint8_t> contents_;
};


// Reads back int32 values from a serialized translation ByteArray.
class TranslationIterator BASE_EMBEDDED {
 public:
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    DCHECK(index >= 0 && index < buffer->length());
  }

  int32_t Next();

  bool HasNext() const { return index_ < buffer_->length(); }

  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;
};

// All translation opcodes; also defines the Translation::Opcode enum below.
#define TRANSLATION_OPCODE_LIST(V) \
  V(BEGIN)                         \
  V(JS_FRAME)                      \
  V(INTERPRETED_FRAME)             \
  V(CONSTRUCT_STUB_FRAME)          \
  V(GETTER_STUB_FRAME)             \
  V(SETTER_STUB_FRAME)             \
  V(ARGUMENTS_ADAPTOR_FRAME)       \
  V(TAIL_CALLER_FRAME)             \
  V(COMPILED_STUB_FRAME)           \
  V(DUPLICATED_OBJECT)             \
  V(ARGUMENTS_OBJECT)              \
V(CAPTURED_OBJECT) \ 936 V(REGISTER) \ 937 V(INT32_REGISTER) \ 938 V(UINT32_REGISTER) \ 939 V(BOOL_REGISTER) \ 940 V(FLOAT_REGISTER) \ 941 V(DOUBLE_REGISTER) \ 942 V(STACK_SLOT) \ 943 V(INT32_STACK_SLOT) \ 944 V(UINT32_STACK_SLOT) \ 945 V(BOOL_STACK_SLOT) \ 946 V(FLOAT_STACK_SLOT) \ 947 V(DOUBLE_STACK_SLOT) \ 948 V(LITERAL) 949 950 class Translation BASE_EMBEDDED { 951 public: 952 #define DECLARE_TRANSLATION_OPCODE_ENUM(item) item, 953 enum Opcode { 954 TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM) 955 LAST = LITERAL 956 }; 957 #undef DECLARE_TRANSLATION_OPCODE_ENUM 958 Translation(TranslationBuffer * buffer,int frame_count,int jsframe_count,Zone * zone)959 Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count, 960 Zone* zone) 961 : buffer_(buffer), 962 index_(buffer->CurrentIndex()), 963 zone_(zone) { 964 buffer_->Add(BEGIN); 965 buffer_->Add(frame_count); 966 buffer_->Add(jsframe_count); 967 } 968 index()969 int index() const { return index_; } 970 971 // Commands. 
972 void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height); 973 void BeginInterpretedFrame(BailoutId bytecode_offset, int literal_id, 974 unsigned height); 975 void BeginCompiledStubFrame(int height); 976 void BeginArgumentsAdaptorFrame(int literal_id, unsigned height); 977 void BeginTailCallerFrame(int literal_id); 978 void BeginConstructStubFrame(BailoutId bailout_id, int literal_id, 979 unsigned height); 980 void BeginGetterStubFrame(int literal_id); 981 void BeginSetterStubFrame(int literal_id); 982 void BeginArgumentsObject(int args_length); 983 void BeginCapturedObject(int length); 984 void DuplicateObject(int object_index); 985 void StoreRegister(Register reg); 986 void StoreInt32Register(Register reg); 987 void StoreUint32Register(Register reg); 988 void StoreBoolRegister(Register reg); 989 void StoreFloatRegister(FloatRegister reg); 990 void StoreDoubleRegister(DoubleRegister reg); 991 void StoreStackSlot(int index); 992 void StoreInt32StackSlot(int index); 993 void StoreUint32StackSlot(int index); 994 void StoreBoolStackSlot(int index); 995 void StoreFloatStackSlot(int index); 996 void StoreDoubleStackSlot(int index); 997 void StoreLiteral(int literal_id); 998 void StoreArgumentsObject(bool args_known, int args_index, int args_length); 999 void StoreJSFrameFunction(); 1000 zone()1001 Zone* zone() const { return zone_; } 1002 1003 static int NumberOfOperandsFor(Opcode opcode); 1004 1005 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER) 1006 static const char* StringFor(Opcode opcode); 1007 #endif 1008 1009 private: 1010 TranslationBuffer* buffer_; 1011 int index_; 1012 Zone* zone_; 1013 }; 1014 1015 1016 class MaterializedObjectStore { 1017 public: MaterializedObjectStore(Isolate * isolate)1018 explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) { 1019 } 1020 1021 Handle<FixedArray> Get(Address fp); 1022 void Set(Address fp, Handle<FixedArray> materialized_objects); 1023 bool Remove(Address fp); 1024 1025 private: 
isolate()1026 Isolate* isolate() { return isolate_; } 1027 Handle<FixedArray> GetStackEntries(); 1028 Handle<FixedArray> EnsureStackEntries(int size); 1029 1030 int StackIdToIndex(Address fp); 1031 1032 Isolate* isolate_; 1033 List<Address> frame_fps_; 1034 }; 1035 1036 1037 // Class used to represent an unoptimized frame when the debugger 1038 // needs to inspect a frame that is part of an optimized frame. The 1039 // internally used FrameDescription objects are not GC safe so for use 1040 // by the debugger frame information is copied to an object of this type. 1041 // Represents parameters in unadapted form so their number might mismatch 1042 // formal parameter count. 1043 class DeoptimizedFrameInfo : public Malloced { 1044 public: 1045 DeoptimizedFrameInfo(TranslatedState* state, 1046 TranslatedState::iterator frame_it, Isolate* isolate); 1047 1048 // Return the number of incoming arguments. parameters_count()1049 int parameters_count() { return static_cast<int>(parameters_.size()); } 1050 1051 // Return the height of the expression stack. expression_count()1052 int expression_count() { return static_cast<int>(expression_stack_.size()); } 1053 1054 // Get the frame function. GetFunction()1055 Handle<JSFunction> GetFunction() { return function_; } 1056 1057 // Get the frame context. GetContext()1058 Handle<Object> GetContext() { return context_; } 1059 1060 // Check if this frame is preceded by construct stub frame. The bottom-most 1061 // inlined frame might still be called by an uninlined construct stub. HasConstructStub()1062 bool HasConstructStub() { 1063 return has_construct_stub_; 1064 } 1065 1066 // Get an incoming argument. GetParameter(int index)1067 Handle<Object> GetParameter(int index) { 1068 DCHECK(0 <= index && index < parameters_count()); 1069 return parameters_[index]; 1070 } 1071 1072 // Get an expression from the expression stack. 
GetExpression(int index)1073 Handle<Object> GetExpression(int index) { 1074 DCHECK(0 <= index && index < expression_count()); 1075 return expression_stack_[index]; 1076 } 1077 GetSourcePosition()1078 int GetSourcePosition() { 1079 return source_position_; 1080 } 1081 1082 private: 1083 // Set an incoming argument. SetParameter(int index,Handle<Object> obj)1084 void SetParameter(int index, Handle<Object> obj) { 1085 DCHECK(0 <= index && index < parameters_count()); 1086 parameters_[index] = obj; 1087 } 1088 1089 // Set an expression on the expression stack. SetExpression(int index,Handle<Object> obj)1090 void SetExpression(int index, Handle<Object> obj) { 1091 DCHECK(0 <= index && index < expression_count()); 1092 expression_stack_[index] = obj; 1093 } 1094 1095 Handle<JSFunction> function_; 1096 Handle<Object> context_; 1097 bool has_construct_stub_; 1098 std::vector<Handle<Object> > parameters_; 1099 std::vector<Handle<Object> > expression_stack_; 1100 int source_position_; 1101 1102 friend class Deoptimizer; 1103 }; 1104 1105 } // namespace internal 1106 } // namespace v8 1107 1108 #endif // V8_DEOPTIMIZER_H_ 1109