// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_DEOPTIMIZER_H_
#define V8_DEOPTIMIZER_H_

#include "src/allocation.h"
#include "src/macro-assembler.h"


namespace v8 {
namespace internal {

class FrameDescription;
class TranslationIterator;
class DeoptimizedFrameInfo;
class TranslatedState;
class RegisterValues;

class TranslatedValue {
 public:
  // Allocation-less getter of the value.
  // Returns heap()->arguments_marker() if allocation would be
  // necessary to get the value.
  Object* GetRawValue() const;
  Handle<Object> GetValue();

  bool IsMaterializedObject() const;
  bool IsMaterializableByDebugger() const;

 private:
  friend class TranslatedState;
  friend class TranslatedFrame;

  enum Kind {
    kInvalid,
    kTagged,
    kInt32,
    kUInt32,
    kBoolBit,
    kFloat,
    kDouble,
    kCapturedObject,    // Object captured by the escape analysis.
                        // The number of nested objects can be obtained
                        // with the DeferredObjectLength() method
                        // (the values of the nested objects follow
                        // this value in the depth-first order.)
    kDuplicatedObject,  // Duplicated object of a deferred object.
    kArgumentsObject    // Arguments object - only used to keep indexing
                        // in sync; it should not be materialized.
  };

  TranslatedValue(TranslatedState* container, Kind kind)
      : kind_(kind), container_(container) {}
  Kind kind() const { return kind_; }
  void Handlify();
  int GetChildrenCount() const;

  static TranslatedValue NewArgumentsObject(TranslatedState* container,
                                            int length, int object_index);
  static TranslatedValue NewDeferredObject(TranslatedState* container,
                                           int length, int object_index);
  static TranslatedValue NewDuplicateObject(TranslatedState* container, int id);
  static TranslatedValue NewFloat(TranslatedState* container, float value);
  static TranslatedValue NewDouble(TranslatedState* container, double value);
  static TranslatedValue NewInt32(TranslatedState* container, int32_t value);
  static TranslatedValue NewUInt32(TranslatedState* container, uint32_t value);
  static TranslatedValue NewBool(TranslatedState* container, uint32_t value);
  static TranslatedValue NewTagged(TranslatedState* container, Object* literal);
  static TranslatedValue NewInvalid(TranslatedState* container);

  Isolate* isolate() const;
  void MaterializeSimple();

  Kind kind_;
  TranslatedState* container_;  // This is only needed for materialization of
                                // objects and constructing handles (to get
                                // to the isolate).

  MaybeHandle<Object> value_;  // Before handlification, this is always null;
                               // after materialization it is never null;
                               // in between it is only null if the value needs
                               // to be materialized.

  struct MaterializedObjectInfo {
    int id_;
    int length_;  // Applies only to kArgumentsObject or kCapturedObject kinds.
  };

  union {
    // kind is kTagged. After handlification it is always nullptr.
    Object* raw_literal_;
    // kind is kUInt32 or kBoolBit.
    uint32_t uint32_value_;
    // kind is kInt32.
    int32_t int32_value_;
    // kind is kFloat.
    float float_value_;
    // kind is kDouble.
    double double_value_;
    // kind is kDuplicatedObject or kArgumentsObject or kCapturedObject.
    MaterializedObjectInfo materialization_info_;
  };

  // Checked accessors for the union members.
  Object* raw_literal() const;
  int32_t int32_value() const;
  uint32_t uint32_value() const;
  float float_value() const;
  double double_value() const;
  int object_length() const;
  int object_index() const;
};
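
// Illustrative sketch only (not code from this file): how the two getters
// above differ, assuming a TranslatedValue& value and the current Isolate*
// isolate are available.
//
//   Object* raw = value.GetRawValue();
//   if (raw == isolate->heap()->arguments_marker()) {
//     // Getting the value requires allocation; take the slow path, which
//     // may materialize the object and therefore needs a HandleScope.
//     Handle<Object> obj = value.GetValue();
//     // ... use obj ...
//   } else {
//     // ... use raw directly; no allocation happened ...
//   }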


class TranslatedFrame {
 public:
  enum Kind {
    kFunction,
    kInterpretedFunction,
    kGetter,
    kSetter,
    kTailCallerFunction,
    kArgumentsAdaptor,
    kConstructStub,
    kCompiledStub,
    kInvalid
  };

  int GetValueCount();

  Kind kind() const { return kind_; }
  BailoutId node_id() const { return node_id_; }
  Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  int height() const { return height_; }

  SharedFunctionInfo* raw_shared_info() const {
    CHECK_NOT_NULL(raw_shared_info_);
    return raw_shared_info_;
  }

  class iterator {
   public:
    iterator& operator++() {
      AdvanceIterator(&position_);
      return *this;
    }

    iterator operator++(int) {
      iterator original(position_);
      AdvanceIterator(&position_);
      return original;
    }

    bool operator==(const iterator& other) const {
      return position_ == other.position_;
    }
    bool operator!=(const iterator& other) const { return !(*this == other); }

    TranslatedValue& operator*() { return (*position_); }
    TranslatedValue* operator->() { return &(*position_); }

   private:
    friend TranslatedFrame;

    explicit iterator(std::deque<TranslatedValue>::iterator position)
        : position_(position) {}

    std::deque<TranslatedValue>::iterator position_;
  };

  typedef TranslatedValue& reference;
  typedef TranslatedValue const& const_reference;

  iterator begin() { return iterator(values_.begin()); }
  iterator end() { return iterator(values_.end()); }

  reference front() { return values_.front(); }
  const_reference front() const { return values_.front(); }

 private:
  friend class TranslatedState;

  // Constructor static methods.
  static TranslatedFrame JSFrame(BailoutId node_id,
                                 SharedFunctionInfo* shared_info, int height);
  static TranslatedFrame InterpretedFrame(BailoutId bytecode_offset,
                                          SharedFunctionInfo* shared_info,
                                          int height);
  static TranslatedFrame AccessorFrame(Kind kind,
                                       SharedFunctionInfo* shared_info);
  static TranslatedFrame ArgumentsAdaptorFrame(SharedFunctionInfo* shared_info,
                                               int height);
  static TranslatedFrame TailCallerFrame(SharedFunctionInfo* shared_info);
  static TranslatedFrame ConstructStubFrame(SharedFunctionInfo* shared_info,
                                            int height);
  static TranslatedFrame CompiledStubFrame(int height, Isolate* isolate) {
    return TranslatedFrame(kCompiledStub, isolate, nullptr, height);
  }
  static TranslatedFrame InvalidFrame() {
    return TranslatedFrame(kInvalid, nullptr);
  }

  static void AdvanceIterator(std::deque<TranslatedValue>::iterator* iter);

  TranslatedFrame(Kind kind, Isolate* isolate,
                  SharedFunctionInfo* shared_info = nullptr, int height = 0)
      : kind_(kind),
        node_id_(BailoutId::None()),
        raw_shared_info_(shared_info),
        height_(height),
        isolate_(isolate) {}

  void Add(const TranslatedValue& value) { values_.push_back(value); }
  void Handlify();

  Kind kind_;
  BailoutId node_id_;
  SharedFunctionInfo* raw_shared_info_;
  Handle<SharedFunctionInfo> shared_info_;
  int height_;
  Isolate* isolate_;

  typedef std::deque<TranslatedValue> ValuesContainer;

  ValuesContainer values_;
};
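
// Illustrative sketch only (the frame variable is a placeholder): walking the
// values of a frame via the iterator defined above.
//
//   TranslatedFrame& frame = ...;  // e.g. obtained from a TranslatedState
//   for (TranslatedFrame::iterator it = frame.begin(); it != frame.end();
//        ++it) {
//     TranslatedValue& value = *it;
//     // GetRawValue() never allocates; GetValue() may materialize.
//     Object* raw = value.GetRawValue();
//     // ... inspect raw ...
//   }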


// Auxiliary class for translating deoptimization values.
// Typical usage sequence:
//
//  1. Construct the instance. This will involve reading out the translations
//     and resolving them to values using the supplied frame pointer and
//     machine state (registers). This phase is guaranteed not to allocate
//     and not to use any HandleScope. Any object pointers will be stored raw.
//
//  2. Handlify pointers. This will convert all the raw pointers to handles.
//
//  3. Read out the frame values.
//
// Note: After the instance is constructed, it is possible to iterate over
// the values eagerly.

class TranslatedState {
 public:
  TranslatedState();
  explicit TranslatedState(JavaScriptFrame* frame);

  void Prepare(bool has_adapted_arguments, Address stack_frame_pointer);

  // Store newly materialized values into the isolate.
  void StoreMaterializedValuesAndDeopt();

  typedef std::vector<TranslatedFrame>::iterator iterator;
  iterator begin() { return frames_.begin(); }
  iterator end() { return frames_.end(); }

  typedef std::vector<TranslatedFrame>::const_iterator const_iterator;
  const_iterator begin() const { return frames_.begin(); }
  const_iterator end() const { return frames_.end(); }

  std::vector<TranslatedFrame>& frames() { return frames_; }

  TranslatedFrame* GetArgumentsInfoFromJSFrameIndex(int jsframe_index,
                                                    int* arguments_count);

  Isolate* isolate() { return isolate_; }

  void Init(Address input_frame_pointer, TranslationIterator* iterator,
            FixedArray* literal_array, RegisterValues* registers,
            FILE* trace_file);

 private:
  friend TranslatedValue;

  TranslatedFrame CreateNextTranslatedFrame(TranslationIterator* iterator,
                                            FixedArray* literal_array,
                                            Address fp,
                                            FILE* trace_file);
  TranslatedValue CreateNextTranslatedValue(int frame_index, int value_index,
                                            TranslationIterator* iterator,
                                            FixedArray* literal_array,
                                            Address fp,
                                            RegisterValues* registers,
                                            FILE* trace_file);

  void UpdateFromPreviouslyMaterializedObjects();
  Handle<Object> MaterializeAt(int frame_index, int* value_index);
  Handle<Object> MaterializeObjectAt(int object_index);
  bool GetAdaptedArguments(Handle<JSObject>* result, int frame_index);

  static uint32_t GetUInt32Slot(Address fp, int slot_index);

  std::vector<TranslatedFrame> frames_;
  Isolate* isolate_;
  Address stack_frame_pointer_;
  bool has_adapted_arguments_;

  struct ObjectPosition {
    int frame_index_;
    int value_index_;
  };
  std::deque<ObjectPosition> object_positions_;
};
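
// A minimal sketch of the usage sequence described above (illustrative only;
// the argument values and variable names are assumptions, not taken from
// this file):
//
//   JavaScriptFrame* frame = ...;          // an optimized frame to inspect
//   TranslatedState state(frame);          // 1. read translations, no GC
//   state.Prepare(false, frame->fp());     // 2. handlify raw pointers
//   for (TranslatedFrame& translated_frame : state) {   // 3. read values
//     for (TranslatedFrame::iterator it = translated_frame.begin();
//          it != translated_frame.end(); ++it) {
//       Handle<Object> value = it->GetValue();
//       // ... inspect value ...
//     }
//   }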


class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from the given native context.
  virtual void EnterContext(Context* context) = 0;

  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from the given native context.
  virtual void LeaveContext(Context* context) = 0;
};
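
// Sketch of a concrete visitor (the class below is hypothetical, not part of
// V8): an instance would be passed to Deoptimizer::VisitAllOptimizedFunctions
// to enumerate every optimized JSFunction, grouped by native context.
//
//   class CountingVisitor : public OptimizedFunctionVisitor {
//    public:
//     void EnterContext(Context* context) override {}
//     void VisitFunction(JSFunction* function) override { count_++; }
//     void LeaveContext(Context* context) override {}
//     int count() const { return count_; }
//
//    private:
//     int count_ = 0;
//   };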

#define DEOPT_MESSAGES_LIST(V) \
  V(kAccessCheck, "Access check needed") \
  V(kNoReason, "no reason") \
  V(kConstantGlobalVariableAssignment, "Constant global variable assignment") \
  V(kConversionOverflow, "conversion overflow") \
  V(kDivisionByZero, "division by zero") \
  V(kElementsKindUnhandledInKeyedLoadGenericStub, \
    "ElementsKind unhandled in KeyedLoadGenericStub") \
  V(kExpectedHeapNumber, "Expected heap number") \
  V(kExpectedSmi, "Expected smi") \
  V(kForcedDeoptToRuntime, "Forced deopt to runtime") \
  V(kHole, "hole") \
  V(kHoleyArrayDespitePackedElements_kindFeedback, \
    "Holey array despite packed elements_kind feedback") \
  V(kInstanceMigrationFailed, "instance migration failed") \
  V(kInsufficientTypeFeedbackForCallWithArguments, \
    "Insufficient type feedback for call with arguments") \
  V(kFastPathFailed, "Falling off the fast path") \
  V(kInsufficientTypeFeedbackForCombinedTypeOfBinaryOperation, \
    "Insufficient type feedback for combined type of binary operation") \
  V(kInsufficientTypeFeedbackForGenericNamedAccess, \
    "Insufficient type feedback for generic named access") \
  V(kInsufficientTypeFeedbackForKeyedLoad, \
    "Insufficient type feedback for keyed load") \
  V(kInsufficientTypeFeedbackForKeyedStore, \
    "Insufficient type feedback for keyed store") \
  V(kInsufficientTypeFeedbackForLHSOfBinaryOperation, \
    "Insufficient type feedback for LHS of binary operation") \
  V(kInsufficientTypeFeedbackForRHSOfBinaryOperation, \
    "Insufficient type feedback for RHS of binary operation") \
  V(kKeyIsNegative, "key is negative") \
  V(kLiteralsWereDisposed, "literals have been disposed") \
  V(kLostPrecision, "lost precision") \
  V(kLostPrecisionOrNaN, "lost precision or NaN") \
  V(kMementoFound, "memento found") \
  V(kMinusZero, "minus zero") \
  V(kNaN, "NaN") \
  V(kNegativeKeyEncountered, "Negative key encountered") \
  V(kNegativeValue, "negative value") \
  V(kNoCache, "no cache") \
  V(kNonStrictElementsInKeyedLoadGenericStub, \
    "non-strict elements in KeyedLoadGenericStub") \
  V(kNotADateObject, "not a date object") \
  V(kNotAHeapNumber, "not a heap number") \
  V(kNotAHeapNumberUndefinedBoolean, "not a heap number/undefined/true/false") \
  V(kNotAHeapNumberUndefined, "not a heap number/undefined") \
  V(kNotAJavaScriptObject, "not a JavaScript object") \
  V(kNotASmi, "not a Smi") \
  V(kNull, "null") \
  V(kOutOfBounds, "out of bounds") \
  V(kOutsideOfRange, "Outside of range") \
  V(kOverflow, "overflow") \
  V(kProxy, "proxy") \
  V(kReceiverWasAGlobalObject, "receiver was a global object") \
  V(kSmi, "Smi") \
  V(kTooManyArguments, "too many arguments") \
  V(kTooManyUndetectableTypes, "Too many undetectable types") \
  V(kTracingElementsTransitions, "Tracing elements transitions") \
  V(kTypeMismatchBetweenFeedbackAndConstant, \
    "Type mismatch between feedback and constant") \
  V(kUndefined, "undefined") \
  V(kUnexpectedCellContentsInConstantGlobalStore, \
    "Unexpected cell contents in constant global store") \
  V(kUnexpectedCellContentsInGlobalStore, \
    "Unexpected cell contents in global store") \
  V(kUnexpectedObject, "unexpected object") \
  V(kUnexpectedRHSOfBinaryOperation, "Unexpected RHS of binary operation") \
  V(kUninitializedBoilerplateInFastClone, \
    "Uninitialized boilerplate in fast clone") \
  V(kUninitializedBoilerplateLiterals, "Uninitialized boilerplate literals") \
  V(kUnknownMapInPolymorphicAccess, "Unknown map in polymorphic access") \
  V(kUnknownMapInPolymorphicCall, "Unknown map in polymorphic call") \
  V(kUnknownMapInPolymorphicElementAccess, \
    "Unknown map in polymorphic element access") \
  V(kUnknownMap, "Unknown map") \
  V(kValueMismatch, "value mismatch") \
  V(kWrongInstanceType, "wrong instance type") \
  V(kWrongMap, "wrong map") \
  V(kUndefinedOrNullInForIn, "null or undefined in for-in") \
  V(kUndefinedOrNullInToObject, "null or undefined in ToObject")
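
// The list above is an X-macro: each client defines V to extract the piece it
// needs and then expands the list. The DeoptReason enum below is built exactly
// this way. A sketch of building the matching message table (the names
// DEOPT_MESSAGES_TEXTS and kDeoptReasonTexts are illustrative, not necessarily
// what deoptimizer.cc uses):
//
//   #define DEOPT_MESSAGES_TEXTS(C, T) T,
//   static const char* const kDeoptReasonTexts[] = {
//       DEOPT_MESSAGES_LIST(DEOPT_MESSAGES_TEXTS)};
//   #undef DEOPT_MESSAGES_TEXTS
//   // kDeoptReasonTexts[Deoptimizer::kAccessCheck] is "Access check needed".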

class Deoptimizer : public Malloced {
 public:
  enum BailoutType { EAGER, LAZY, SOFT, kLastBailoutType = SOFT };

  enum class BailoutState {
    NO_REGISTERS,
    TOS_REGISTER,
  };

  static const char* BailoutStateToString(BailoutState state) {
    switch (state) {
      case BailoutState::NO_REGISTERS:
        return "NO_REGISTERS";
      case BailoutState::TOS_REGISTER:
        return "TOS_REGISTER";
    }
    UNREACHABLE();
    return nullptr;
  }

#define DEOPT_MESSAGES_CONSTANTS(C, T) C,
  enum DeoptReason {
    DEOPT_MESSAGES_LIST(DEOPT_MESSAGES_CONSTANTS) kLastDeoptReason
  };
#undef DEOPT_MESSAGES_CONSTANTS
  static const char* GetDeoptReason(DeoptReason deopt_reason);

  struct DeoptInfo {
    DeoptInfo(SourcePosition position, DeoptReason deopt_reason, int deopt_id)
        : position(position), deopt_reason(deopt_reason), deopt_id(deopt_id) {}

    SourcePosition position;
    DeoptReason deopt_reason;
    int deopt_id;

    static const int kNoDeoptId = -1;
  };

  static DeoptInfo GetDeoptInfo(Code* code, byte* from);

  static int ComputeSourcePosition(SharedFunctionInfo* shared,
                                   BailoutId node_id);

  struct JumpTableEntry : public ZoneObject {
    inline JumpTableEntry(Address entry, const DeoptInfo& deopt_info,
                          Deoptimizer::BailoutType type, bool frame)
        : label(),
          address(entry),
          deopt_info(deopt_info),
          bailout_type(type),
          needs_frame(frame) {}

    bool IsEquivalentTo(const JumpTableEntry& other) const {
      return address == other.address && bailout_type == other.bailout_type &&
             needs_frame == other.needs_frame;
    }

    Label label;
    Address address;
    DeoptInfo deopt_info;
    Deoptimizer::BailoutType bailout_type;
    bool needs_frame;
  };

  static bool TraceEnabledFor(BailoutType deopt_type,
                              StackFrame::Type frame_type);
  static const char* MessageFor(BailoutType type);

  int output_count() const { return output_count_; }

  Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
  Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
  BailoutType bailout_type() const { return bailout_type_; }

  // Number of created JS frames. Not all created frames are necessarily JS.
  int jsframe_count() const { return jsframe_count_; }

  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta,
                          Isolate* isolate);
  static Deoptimizer* Grab(Isolate* isolate);

  // The returned object with information on the optimized frame needs to be
  // freed before another one can be generated.
  static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
                                                        int jsframe_index,
                                                        Isolate* isolate);
  static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                             Isolate* isolate);

  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room a new relocation
  // information object is allocated and comments are added until it
  // is big enough.
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all code in the given isolate.
  static void DeoptimizeAll(Isolate* isolate);

  // Deoptimizes all optimized code that has been previously marked
  // (via code->set_marked_for_deoptimization) and unlinks all functions that
  // refer to that code.
  static void DeoptimizeMarkedCode(Isolate* isolate);

  // Visit all the known optimized functions in a given isolate.
  static void VisitAllOptimizedFunctions(
      Isolate* isolate, OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  ~Deoptimizer();

  void MaterializeHeapObjects(JavaScriptFrameIterator* it);

  static void ComputeOutputFrames(Deoptimizer* deoptimizer);


  enum GetEntryMode {
    CALCULATE_ENTRY_ADDRESS,
    ENSURE_ENTRY_CODE
  };


  static Address GetDeoptimizationEntry(
      Isolate* isolate,
      int id,
      BailoutType type,
      GetEntryMode mode = ENSURE_ENTRY_CODE);
  static int GetDeoptimizationId(Isolate* isolate,
                                 Address addr,
                                 BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           BailoutId node_id,
                           SharedFunctionInfo* shared);

  // Code generation support.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int caller_frame_top_offset() {
    return OFFSET_OF(Deoptimizer, caller_frame_top_);
  }

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class TableEntryGenerator BASE_EMBEDDED {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : masm_(masm), type_(type), count_(count) {}

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }
    Isolate* isolate() const { return masm_->isolate(); }

    void GeneratePrologue();

   private:
    int count() const { return count_; }

    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
    int count_;
  };

  int ConvertJSFrameIndexToFrameIndex(int jsframe_index);

  static size_t GetMaxDeoptTableSize();

  static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                               BailoutType type,
                                               int max_entry_id);

  Isolate* isolate() const { return isolate_; }

 private:
  static const int kMinNumberOfEntries = 64;
  static const int kMaxNumberOfEntries = 16384;

  Deoptimizer(Isolate* isolate,
              JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta,
              Code* optimized_code);
  Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
  void PrintFunctionName();
  void DeleteFrameDescriptions();

  void DoComputeOutputFrames();
  void DoComputeJSFrame(TranslatedFrame* translated_frame, int frame_index,
                        bool goto_catch_handler);
  void DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
                                 int frame_index, bool goto_catch_handler);
  void DoComputeArgumentsAdaptorFrame(TranslatedFrame* translated_frame,
                                      int frame_index);
  void DoComputeTailCallerFrame(TranslatedFrame* translated_frame,
                                int frame_index);
  void DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
                                   int frame_index);
  void DoComputeAccessorStubFrame(TranslatedFrame* translated_frame,
                                  int frame_index, bool is_setter_stub_frame);
  void DoComputeCompiledStubFrame(TranslatedFrame* translated_frame,
                                  int frame_index);

  void WriteTranslatedValueToOutput(
      TranslatedFrame::iterator* iterator, int* input_index, int frame_index,
      unsigned output_offset, const char* debug_hint_string = nullptr,
      Address output_address_for_materialization = nullptr);
  void WriteValueToOutput(Object* value, int input_index, int frame_index,
                          unsigned output_offset,
                          const char* debug_hint_string);
  void DebugPrintOutputSlot(intptr_t value, int frame_index,
                            unsigned output_offset,
                            const char* debug_hint_string);

  unsigned ComputeInputFrameAboveFpFixedSize() const;
  unsigned ComputeInputFrameSize() const;
  static unsigned ComputeJavascriptFixedSize(SharedFunctionInfo* shared);
  static unsigned ComputeInterpretedFixedSize(SharedFunctionInfo* shared);

  static unsigned ComputeIncomingArgumentSize(SharedFunctionInfo* shared);
  static unsigned ComputeOutgoingArgumentSize(Code* code, unsigned bailout_id);

  Object* ComputeLiteral(int index) const;

  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Marks all the code in the given context for deoptimization.
  static void MarkAllCodeForContext(Context* native_context);

  // Visit all the known optimized functions in a given context.
  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  // Deoptimizes all code marked in the given context.
  static void DeoptimizeMarkedCodeForContext(Context* native_context);

  // Patch the given code so that it will deoptimize itself.
  static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);

  // Searches the list of known deoptimizing code for a Code object
  // containing the given address (which is supposedly faster than
  // searching all code objects).
  Code* FindDeoptimizingCode(Address addr);

  // Fill the given output frame's registers to contain the failure handler
  // address and the number of parameters for a stub failure trampoline.
  void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
                                        CodeStubDescriptor* desc);

  // Fill the given output frame's double registers with the original values
  // from the input frame's double registers.
  void CopyDoubleRegisters(FrameDescription* output_frame);

  Isolate* isolate_;
  JSFunction* function_;
  Code* compiled_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;
  int fp_to_sp_delta_;
  bool deoptimizing_throw_;
  int catch_handler_data_;
  int catch_handler_pc_offset_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Number of output js frames.
  int jsframe_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  // Caller frame details computed from input frame.
  intptr_t caller_frame_top_;
  intptr_t caller_fp_;
  intptr_t caller_pc_;
  intptr_t caller_constant_pool_;
  intptr_t input_frame_context_;

  // Key for lookup of previously materialized objects
  intptr_t stack_fp_;

  TranslatedState translated_state_;
  struct ValueToMaterialize {
    Address output_slot_address_;
    TranslatedFrame::iterator value_;
  };
  std::vector<ValueToMaterialize> values_to_materialize_;

#ifdef DEBUG
  DisallowHeapAllocation* disallow_heap_allocation_;
#endif  // DEBUG

  CodeTracer::Scope* trace_scope_;

  static const int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizedFrameInfo;
};
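
// Rough sketch of the lifecycle implied by this interface (illustrative only;
// the real callers are the generated deoptimization entry stubs and the
// runtime, and the argument values below are placeholders):
//
//   Deoptimizer* d = Deoptimizer::New(function, Deoptimizer::EAGER,
//                                     bailout_id, from_pc, fp_to_sp_delta,
//                                     isolate);
//   Deoptimizer::ComputeOutputFrames(d);  // build output FrameDescriptions
//   // ... generated code copies the output frames onto the stack ...
//   Deoptimizer* d2 = Deoptimizer::Grab(isolate);  // retrieve and own it
//   d2->MaterializeHeapObjects(&frame_iterator);
//   delete d2;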


class RegisterValues {
 public:
  intptr_t GetRegister(unsigned n) const {
#if DEBUG
    // This convoluted DCHECK is needed to work around a gcc problem that
    // improperly detects an array bounds overflow in optimized debug builds
    // when using a plain DCHECK.
    if (n >= arraysize(registers_)) {
      DCHECK(false);
      return 0;
    }
#endif
    return registers_[n];
  }

  float GetFloatRegister(unsigned n) const {
    DCHECK(n < arraysize(float_registers_));
    return float_registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    DCHECK(n < arraysize(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    DCHECK(n < arraysize(registers_));
    registers_[n] = value;
  }

  void SetFloatRegister(unsigned n, float value) {
    DCHECK(n < arraysize(float_registers_));
    float_registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    DCHECK(n < arraysize(double_registers_));
    double_registers_[n] = value;
  }

  intptr_t registers_[Register::kNumRegisters];
  float float_registers_[FloatRegister::kMaxNumRegisters];
  double double_registers_[DoubleRegister::kMaxNumRegisters];
};


class FrameDescription {
 public:
  explicit FrameDescription(uint32_t frame_size, int parameter_count = 0);

  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    return malloc(size + frame_size - kPointerSize);
  }

  void operator delete(void* pointer, uint32_t frame_size) {
    free(pointer);
  }

  void operator delete(void* description) {
    free(description);
  }

  uint32_t GetFrameSize() const {
    DCHECK(static_cast<uint32_t>(frame_size_) == frame_size_);
    return static_cast<uint32_t>(frame_size_);
  }

  unsigned GetOffsetFromSlotIndex(int slot_index);

  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  Address GetFramePointerAddress() {
    int fp_offset = GetFrameSize() - parameter_count() * kPointerSize -
                    StandardFrameConstants::kCallerSPOffset;
    return reinterpret_cast<Address>(GetFrameSlotPointer(fp_offset));
  }

  RegisterValues* GetRegisterValues() { return &register_values_; }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  void SetCallerPc(unsigned offset, intptr_t value);

  void SetCallerFp(unsigned offset, intptr_t value);

  void SetCallerConstantPool(unsigned offset, intptr_t value);

  intptr_t GetRegister(unsigned n) const {
    return register_values_.GetRegister(n);
  }

  double GetDoubleRegister(unsigned n) const {
    return register_values_.GetDoubleRegister(n);
  }

  void SetRegister(unsigned n, intptr_t value) {
    register_values_.SetRegister(n, value);
  }

  void SetDoubleRegister(unsigned n, double value) {
    register_values_.SetDoubleRegister(n, value);
  }

  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  intptr_t GetContext() const { return context_; }
  void SetContext(intptr_t context) { context_ = context; }

  intptr_t GetConstantPool() const { return constant_pool_; }
  void SetConstantPool(intptr_t constant_pool) {
    constant_pool_ = constant_pool;
  }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  StackFrame::Type GetFrameType() const { return type_; }
  void SetFrameType(StackFrame::Type type) { type_ = type; }

  // Argument count, including receiver.
  int parameter_count() { return parameter_count_; }

  static int registers_offset() {
    return OFFSET_OF(FrameDescription, register_values_.registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, register_values_.double_registers_);
  }

  static int frame_size_offset() {
    return offsetof(FrameDescription, frame_size_);
  }

  static int pc_offset() { return offsetof(FrameDescription, pc_); }

  static int state_offset() { return offsetof(FrameDescription, state_); }

  static int continuation_offset() {
    return offsetof(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return offsetof(FrameDescription, frame_content_);
  }

 private:
  static const uint32_t kZapUint32 = 0xbeeddead;

  // frame_size_ must hold a uint32_t value. It is only a uintptr_t to
  // keep the variable-size array frame_content_ of type intptr_t at
  // the end of the structure aligned.
  uintptr_t frame_size_;  // Number of bytes.
  int parameter_count_;
  RegisterValues register_values_;
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  intptr_t context_;
  intptr_t constant_pool_;
  StackFrame::Type type_;
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // This must be at the end of the object, as the object is allocated larger
  // than its definition indicates in order to extend this array.
  intptr_t frame_content_[1];

  intptr_t* GetFrameSlotPointer(unsigned offset) {
    DCHECK(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }

  int ComputeFixedSize();
};
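
// The class above over-allocates: operator new(size, frame_size) reserves the
// declared object plus (frame_size - kPointerSize) extra bytes, so the
// one-element frame_content_ array at the end grows to cover the whole frame.
// Illustrative sketch of the same idiom with hypothetical names:
//
//   struct Buffer {
//     size_t size;
//     intptr_t data[1];  // really payload_bytes / sizeof(intptr_t) elements
//
//     void* operator new(size_t object_size, size_t payload_bytes) {
//       // One data slot is already part of the object, hence the subtraction.
//       return malloc(object_size + payload_bytes - sizeof(intptr_t));
//     }
//     void operator delete(void* p) { free(p); }
//   };
//
//   Buffer* b = new (128) Buffer;  // object plus 128 bytes of data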


class DeoptimizerData {
 public:
  explicit DeoptimizerData(MemoryAllocator* allocator);
  ~DeoptimizerData();

 private:
  MemoryAllocator* allocator_;
  int deopt_entry_code_entries_[Deoptimizer::kLastBailoutType + 1];
  MemoryChunk* deopt_entry_code_[Deoptimizer::kLastBailoutType + 1];

  Deoptimizer* current_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};


class TranslationBuffer BASE_EMBEDDED {
 public:
  explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }

  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value, Zone* zone);

  Handle<ByteArray> CreateByteArray(Factory* factory);

 private:
  ZoneList<uint8_t> contents_;
};


class TranslationIterator BASE_EMBEDDED {
 public:
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    DCHECK(index >= 0 && index < buffer->length());
  }

  int32_t Next();

  bool HasNext() const { return index_ < buffer_->length(); }

  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;
};
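
// Illustrative sketch (variable names are placeholders): decoding a
// translation stream with the iterator above. Each record starts with an
// opcode from the Translation class below, whose operand count is given by
// Translation::NumberOfOperandsFor().
//
//   TranslationIterator it(translation_byte_array, 0);
//   while (it.HasNext()) {
//     Translation::Opcode opcode =
//         static_cast<Translation::Opcode>(it.Next());
//     int operand_count = Translation::NumberOfOperandsFor(opcode);
//     for (int i = 0; i < operand_count; i++) {
//       int32_t operand = it.Next();
//       // ... interpret operand according to opcode ...
//     }
//   }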

#define TRANSLATION_OPCODE_LIST(V) \
  V(BEGIN)                         \
  V(JS_FRAME)                      \
  V(INTERPRETED_FRAME)             \
  V(CONSTRUCT_STUB_FRAME)          \
  V(GETTER_STUB_FRAME)             \
  V(SETTER_STUB_FRAME)             \
  V(ARGUMENTS_ADAPTOR_FRAME)       \
  V(TAIL_CALLER_FRAME)             \
  V(COMPILED_STUB_FRAME)           \
  V(DUPLICATED_OBJECT)             \
  V(ARGUMENTS_OBJECT)              \
  V(CAPTURED_OBJECT)               \
  V(REGISTER)                      \
  V(INT32_REGISTER)                \
  V(UINT32_REGISTER)               \
  V(BOOL_REGISTER)                 \
  V(FLOAT_REGISTER)                \
  V(DOUBLE_REGISTER)               \
  V(STACK_SLOT)                    \
  V(INT32_STACK_SLOT)              \
  V(UINT32_STACK_SLOT)             \
  V(BOOL_STACK_SLOT)               \
  V(FLOAT_STACK_SLOT)              \
  V(DOUBLE_STACK_SLOT)             \
  V(LITERAL)

class Translation BASE_EMBEDDED {
 public:
#define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
  enum Opcode {
    TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
    LAST = LITERAL
  };
#undef DECLARE_TRANSLATION_OPCODE_ENUM

  Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
              Zone* zone)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()),
        zone_(zone) {
    buffer_->Add(BEGIN, zone);
    buffer_->Add(frame_count, zone);
    buffer_->Add(jsframe_count, zone);
  }

  int index() const { return index_; }

  // Commands.
  void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
  void BeginInterpretedFrame(BailoutId bytecode_offset, int literal_id,
                             unsigned height);
  void BeginCompiledStubFrame(int height);
  void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
  void BeginTailCallerFrame(int literal_id);
  void BeginConstructStubFrame(int literal_id, unsigned height);
  void BeginGetterStubFrame(int literal_id);
  void BeginSetterStubFrame(int literal_id);
  void BeginArgumentsObject(int args_length);
  void BeginCapturedObject(int length);
  void DuplicateObject(int object_index);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreUint32Register(Register reg);
  void StoreBoolRegister(Register reg);
  void StoreFloatRegister(FloatRegister reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreUint32StackSlot(int index);
  void StoreBoolStackSlot(int index);
  void StoreFloatStackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject(bool args_known, int args_index, int args_length);
  void StoreJSFrameFunction();

  Zone* zone() const { return zone_; }

  static int NumberOfOperandsFor(Opcode opcode);

#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
  static const char* StringFor(Opcode opcode);
#endif

 private:
  TranslationBuffer* buffer_;
  int index_;
  Zone* zone_;
};


class MaterializedObjectStore {
 public:
  explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
  }

  Handle<FixedArray> Get(Address fp);
  void Set(Address fp, Handle<FixedArray> materialized_objects);
  bool Remove(Address fp);

 private:
  Isolate* isolate() { return isolate_; }
  Handle<FixedArray> GetStackEntries();
  Handle<FixedArray> EnsureStackEntries(int size);

  int StackIdToIndex(Address fp);

  Isolate* isolate_;
  List<Address> frame_fps_;
};
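
// Sketch of how a compiler backend might record a deopt point with the
// Translation class above (illustrative only; the ids, slot indices, and the
// register are placeholders):
//
//   TranslationBuffer buffer(zone);
//   Translation translation(&buffer, 1 /* frame_count */,
//                           1 /* jsframe_count */, zone);
//   translation.BeginJSFrame(ast_id, closure_literal_id, height);
//   translation.StoreStackSlot(slot_index);   // one entry per frame value
//   translation.StoreRegister(rax);           // architecture-specific
//   translation.StoreLiteral(literal_id);
//   Handle<ByteArray> packed = buffer.CreateByteArray(isolate->factory());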


// Class used to represent an unoptimized frame when the debugger
// needs to inspect a frame that is part of an optimized frame. The
// internally used FrameDescription objects are not GC safe, so for use
// by the debugger the frame information is copied to an object of this type.
// Represents parameters in unadapted form, so their number might not match
// the formal parameter count.
class DeoptimizedFrameInfo : public Malloced {
 public:
  DeoptimizedFrameInfo(TranslatedState* state,
                       TranslatedState::iterator frame_it, Isolate* isolate);

  // Return the number of incoming arguments.
  int parameters_count() { return static_cast<int>(parameters_.size()); }

  // Return the height of the expression stack.
  int expression_count() { return static_cast<int>(expression_stack_.size()); }

  // Get the frame function.
  Handle<JSFunction> GetFunction() { return function_; }

  // Get the frame context.
  Handle<Object> GetContext() { return context_; }

  // Check if this frame is preceded by a construct stub frame. The bottom-most
  // inlined frame might still be called by an uninlined construct stub.
  bool HasConstructStub() {
    return has_construct_stub_;
  }

  // Get an incoming argument.
  Handle<Object> GetParameter(int index) {
    DCHECK(0 <= index && index < parameters_count());
    return parameters_[index];
  }

  // Get an expression from the expression stack.
  Handle<Object> GetExpression(int index) {
    DCHECK(0 <= index && index < expression_count());
    return expression_stack_[index];
  }

  int GetSourcePosition() {
    return source_position_;
  }

 private:
  // Set an incoming argument.
  void SetParameter(int index, Handle<Object> obj) {
    DCHECK(0 <= index && index < parameters_count());
    parameters_[index] = obj;
  }

  // Set an expression on the expression stack.
  void SetExpression(int index, Handle<Object> obj) {
    DCHECK(0 <= index && index < expression_count());
    expression_stack_[index] = obj;
  }

  Handle<JSFunction> function_;
  Handle<Object> context_;
  bool has_construct_stub_;
  std::vector<Handle<Object> > parameters_;
  std::vector<Handle<Object> > expression_stack_;
  int source_position_;

  friend class Deoptimizer;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_DEOPTIMIZER_H_