// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_STUB_CACHE_H_
#define V8_STUB_CACHE_H_

#include "allocation.h"
#include "arguments.h"
#include "ic-inl.h"
#include "macro-assembler.h"
#include "objects.h"
#include "zone-inl.h"

namespace v8 {
namespace internal {


// The stub cache is used for megamorphic calls and property accesses.
// It maps (map, name, type) -> Code*.
//
// The design of the table reuses the inline cache stubs generated for
// monomorphic calls. The beauty of this is that we do not have to
// invalidate the cache whenever a prototype map is changed.  The stub
// validates the map chain exactly as in the monomorphic case.

class SmallMapList;
class StubCache;


class SCTableReference {
 public:
  Address address() const { return address_; }

 private:
  explicit SCTableReference(Address address) : address_(address) {}

  Address address_;

  friend class StubCache;
};


class StubCache {
 public:
  struct Entry {
    String* key;
    Code* value;
    Map* map;
  };

  void Initialize();


  // Computes the right stub matching. Inserts the result in the
  // cache before returning.  This might compile a stub if needed.
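  // Roughly, on a megamorphic access the generated lookup code (see
  // GenerateProbe below) probes the primary table and then the secondary
  // table; on a cache miss the runtime selects or compiles the needed stub
  // via one of the Compute* functions below and records it with Set().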
  Handle<Code> ComputeLoadNonexistent(Handle<String> name,
                                      Handle<JSObject> receiver);

  Handle<Code> ComputeLoadField(Handle<String> name,
                                Handle<JSObject> receiver,
                                Handle<JSObject> holder,
                                int field_index);

  Handle<Code> ComputeLoadCallback(Handle<String> name,
                                   Handle<JSObject> receiver,
                                   Handle<JSObject> holder,
                                   Handle<AccessorInfo> callback);

  Handle<Code> ComputeLoadConstant(Handle<String> name,
                                   Handle<JSObject> receiver,
                                   Handle<JSObject> holder,
                                   Handle<JSFunction> value);

  Handle<Code> ComputeLoadInterceptor(Handle<String> name,
                                      Handle<JSObject> receiver,
                                      Handle<JSObject> holder);

  Handle<Code> ComputeLoadNormal();

  Handle<Code> ComputeLoadGlobal(Handle<String> name,
                                 Handle<JSObject> receiver,
                                 Handle<GlobalObject> holder,
                                 Handle<JSGlobalPropertyCell> cell,
                                 bool is_dont_delete);

  // ---

  Handle<Code> ComputeKeyedLoadField(Handle<String> name,
                                     Handle<JSObject> receiver,
                                     Handle<JSObject> holder,
                                     int field_index);

  Handle<Code> ComputeKeyedLoadCallback(Handle<String> name,
                                        Handle<JSObject> receiver,
                                        Handle<JSObject> holder,
                                        Handle<AccessorInfo> callback);

  Handle<Code> ComputeKeyedLoadConstant(Handle<String> name,
                                        Handle<JSObject> receiver,
                                        Handle<JSObject> holder,
                                        Handle<JSFunction> value);

  Handle<Code> ComputeKeyedLoadInterceptor(Handle<String> name,
                                           Handle<JSObject> receiver,
                                           Handle<JSObject> holder);

  Handle<Code> ComputeKeyedLoadArrayLength(Handle<String> name,
                                           Handle<JSArray> receiver);

  Handle<Code> ComputeKeyedLoadStringLength(Handle<String> name,
                                            Handle<String> receiver);

  Handle<Code> ComputeKeyedLoadFunctionPrototype(Handle<String> name,
                                                 Handle<JSFunction> receiver);

  // ---

  Handle<Code> ComputeStoreField(Handle<String> name,
                                 Handle<JSObject> receiver,
                                 int field_index,
                                 Handle<Map> transition,
                                 StrictModeFlag strict_mode);

  Handle<Code> ComputeStoreNormal(StrictModeFlag strict_mode);

  Handle<Code> ComputeStoreGlobal(Handle<String> name,
                                  Handle<GlobalObject> receiver,
                                  Handle<JSGlobalPropertyCell> cell,
                                  StrictModeFlag strict_mode);

  Handle<Code> ComputeStoreCallback(Handle<String> name,
                                    Handle<JSObject> receiver,
                                    Handle<AccessorInfo> callback,
                                    StrictModeFlag strict_mode);

  Handle<Code> ComputeStoreInterceptor(Handle<String> name,
                                       Handle<JSObject> receiver,
                                       StrictModeFlag strict_mode);

  // ---

  Handle<Code> ComputeKeyedStoreField(Handle<String> name,
                                      Handle<JSObject> receiver,
                                      int field_index,
                                      Handle<Map> transition,
                                      StrictModeFlag strict_mode);

  Handle<Code> ComputeKeyedLoadOrStoreElement(Handle<JSObject> receiver,
                                              KeyedIC::StubKind stub_kind,
                                              StrictModeFlag strict_mode);

  // ---

  Handle<Code> ComputeCallField(int argc,
                                Code::Kind,
                                Code::ExtraICState extra_state,
                                Handle<String> name,
                                Handle<Object> object,
                                Handle<JSObject> holder,
                                int index);

  Handle<Code> ComputeCallConstant(int argc,
                                   Code::Kind,
                                   Code::ExtraICState extra_state,
                                   Handle<String> name,
                                   Handle<Object> object,
                                   Handle<JSObject> holder,
                                   Handle<JSFunction> function);

  Handle<Code> ComputeCallInterceptor(int argc,
                                      Code::Kind,
                                      Code::ExtraICState extra_state,
                                      Handle<String> name,
                                      Handle<Object> object,
                                      Handle<JSObject> holder);

  Handle<Code> ComputeCallGlobal(int argc,
                                 Code::Kind,
                                 Code::ExtraICState extra_state,
                                 Handle<String> name,
                                 Handle<JSObject> receiver,
                                 Handle<GlobalObject> holder,
                                 Handle<JSGlobalPropertyCell> cell,
                                 Handle<JSFunction> function);

  // ---

  Handle<Code> ComputeCallInitialize(int argc, RelocInfo::Mode mode);

  Handle<Code> ComputeKeyedCallInitialize(int argc);

  Handle<Code> ComputeCallPreMonomorphic(int argc,
                                         Code::Kind kind,
                                         Code::ExtraICState extra_state);

  Handle<Code> ComputeCallNormal(int argc,
                                 Code::Kind kind,
                                 Code::ExtraICState state);

  Handle<Code> ComputeCallArguments(int argc, Code::Kind kind);

  Handle<Code> ComputeCallMegamorphic(int argc,
                                      Code::Kind kind,
                                      Code::ExtraICState state);

  Handle<Code> ComputeCallMiss(int argc,
                               Code::Kind kind,
                               Code::ExtraICState state);

  // Finds the Code object stored in the Heap::non_monomorphic_cache().
  Code* FindCallInitialize(int argc, RelocInfo::Mode mode, Code::Kind kind);

#ifdef ENABLE_DEBUGGER_SUPPORT
  Handle<Code> ComputeCallDebugBreak(int argc, Code::Kind kind);

  Handle<Code> ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind);
#endif

  // Update cache for entry hash(name, map).
  Code* Set(String* name, Map* map, Code* code);

  // Clear the lookup table (@ mark compact collection).
  void Clear();

  // Collect all maps that match the name and flags.
  void CollectMatchingMaps(SmallMapList* types,
                           String* name,
                           Code::Flags flags,
                           Handle<Context> global_context);

  // Generate code for probing the stub cache table.
  // Arguments extra, extra2 and extra3 may be used to pass additional scratch
  // registers.  Set to no_reg if not needed.
  void GenerateProbe(MacroAssembler* masm,
                     Code::Flags flags,
                     Register receiver,
                     Register name,
                     Register scratch,
                     Register extra,
                     Register extra2 = no_reg,
                     Register extra3 = no_reg);

  enum Table {
    kPrimary,
    kSecondary
  };


  SCTableReference key_reference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->key));
  }


  SCTableReference map_reference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->map));
  }


  SCTableReference value_reference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->value));
  }


  StubCache::Entry* first_entry(StubCache::Table table) {
    switch (table) {
      case StubCache::kPrimary: return StubCache::primary_;
      case StubCache::kSecondary: return StubCache::secondary_;
    }
    UNREACHABLE();
    return NULL;
  }

  Isolate* isolate() { return isolate_; }
  Heap* heap() { return isolate()->heap(); }
  Factory* factory() { return isolate()->factory(); }

 private:
  explicit StubCache(Isolate* isolate);

  Handle<Code> ComputeCallInitialize(int argc,
                                     RelocInfo::Mode mode,
                                     Code::Kind kind);

  // The stub cache has a primary and secondary level.  The two levels have
  // different hashing algorithms in order to avoid simultaneous collisions
  // in both caches.
  // Unlike a probing strategy (quadratic or otherwise), the update strategy
  // is fairly clear and simple: any existing entry in the primary cache is
  // moved to the secondary cache, and secondary cache entries are
  // overwritten.

  // Hash algorithm for the primary table.  This algorithm is replicated in
  // assembler for every architecture.  Returns an index into the table that
  // is scaled by 1 << kHeapObjectTagSize.
  static int PrimaryOffset(String* name, Code::Flags flags, Map* map) {
    // This works well because the heap object tag size and the hash
    // shift are equal.  Shifting down the length field to get the
    // hash code would effectively throw away two bits of the hash
    // code.
    STATIC_ASSERT(kHeapObjectTagSize == String::kHashShift);
    // Compute the hash of the name (use entire hash field).
    ASSERT(name->HasHashCode());
    uint32_t field = name->hash_field();
    // Using only the low bits in 64-bit mode is unlikely to increase the
    // risk of collision even if the heap is spread over an area larger than
    // 4Gb (and not at all if it isn't).
    uint32_t map_low32bits =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map));
    // We always set the in_loop bit to zero when generating the lookup code
    // so do it here too so the hash codes match.
    uint32_t iflags =
        (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
    // Base the offset on a simple combination of name, flags, and map.
    uint32_t key = (map_low32bits + field) ^ iflags;
    return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
  }

  // Hash algorithm for the secondary table.  This algorithm is replicated in
  // assembler for every architecture.  Returns an index into the table that
  // is scaled by 1 << kHeapObjectTagSize.
  static int SecondaryOffset(String* name, Code::Flags flags, int seed) {
    // Use the seed from the primary cache in the secondary cache.
    uint32_t string_low32bits =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
    // We always set the in_loop bit to zero when generating the lookup code
    // so do it here too so the hash codes match.
    uint32_t iflags =
        (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
    uint32_t key = (seed - string_low32bits) + iflags;
    return key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
  }

  // Compute the entry for a given offset in exactly the same way as
  // we do in generated code.  We generate a hash code that already
  // ends in String::kHashShift 0s.  Then we multiply it so it is a multiple
  // of sizeof(Entry).  This makes it easier to avoid making mistakes
  // in the hashed offset computations.
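  // For example, on a 32-bit target where kHeapObjectTagSize ==
  // String::kHashShift == 2 and sizeof(Entry) == 3 * kPointerSize == 12,
  // every offset is a multiple of 4 and multiplier == 3, so
  // offset * multiplier lands exactly on an Entry boundary.  (Illustrative
  // numbers only; the actual values depend on the target configuration.)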
  static Entry* entry(Entry* table, int offset) {
    const int multiplier = sizeof(*table) >> String::kHashShift;
    return reinterpret_cast<Entry*>(
        reinterpret_cast<Address>(table) + offset * multiplier);
  }

  static const int kPrimaryTableBits = 11;
  static const int kPrimaryTableSize = (1 << kPrimaryTableBits);
  static const int kSecondaryTableBits = 9;
  static const int kSecondaryTableSize = (1 << kSecondaryTableBits);

  Entry primary_[kPrimaryTableSize];
  Entry secondary_[kSecondaryTableSize];
  Isolate* isolate_;

  friend class Isolate;
  friend class SCTableReference;

  DISALLOW_COPY_AND_ASSIGN(StubCache);
};


// ------------------------------------------------------------------------


// Support functions for IC stubs for callbacks.
DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadCallbackProperty);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreCallbackProperty);


// Support functions for IC stubs for interceptors.
DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorOnly);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForLoad);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForCall);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreInterceptorProperty);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, CallInterceptorProperty);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedLoadPropertyWithInterceptor);


// The stub compilers compile stubs for the stub cache.
class StubCompiler BASE_EMBEDDED {
 public:
  explicit StubCompiler(Isolate* isolate)
      : isolate_(isolate), masm_(isolate, NULL, 256), failure_(NULL) { }

  // Functions to compile either CallIC or KeyedCallIC.  The specific kind
  // is extracted from the code flags.
  Handle<Code> CompileCallInitialize(Code::Flags flags);
  Handle<Code> CompileCallPreMonomorphic(Code::Flags flags);
  Handle<Code> CompileCallNormal(Code::Flags flags);
  Handle<Code> CompileCallMegamorphic(Code::Flags flags);
  Handle<Code> CompileCallArguments(Code::Flags flags);
  Handle<Code> CompileCallMiss(Code::Flags flags);

#ifdef ENABLE_DEBUGGER_SUPPORT
  Handle<Code> CompileCallDebugBreak(Code::Flags flags);
  Handle<Code> CompileCallDebugPrepareStepIn(Code::Flags flags);
#endif

  // Static functions for generating parts of stubs.
  static void GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                  int index,
                                                  Register prototype);

  // Generates prototype loading code that uses the objects from the
  // context we were in when this function was called.  If the context
  // has changed, a jump to miss is performed.  This ties the generated
  // code to a particular context and so must not be used in cases
  // where the generated code is not allowed to have references to
  // objects from a context.
  static void GenerateDirectLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                        int index,
                                                        Register prototype,
                                                        Label* miss);

  static void GenerateFastPropertyLoad(MacroAssembler* masm,
                                       Register dst,
                                       Register src,
                                       Handle<JSObject> holder,
                                       int index);

  static void GenerateLoadArrayLength(MacroAssembler* masm,
                                      Register receiver,
                                      Register scratch,
                                      Label* miss_label);

  static void GenerateLoadStringLength(MacroAssembler* masm,
                                       Register receiver,
                                       Register scratch1,
                                       Register scratch2,
                                       Label* miss_label,
                                       bool support_wrappers);

  static void GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss_label);

  static void GenerateStoreField(MacroAssembler* masm,
                                 Handle<JSObject> object,
                                 int index,
                                 Handle<Map> transition,
                                 Register receiver_reg,
                                 Register name_reg,
                                 Register scratch,
                                 Label* miss_label);

  static void GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind);

  static void GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm);

  // Generates code that verifies that the property holder has not changed
  // (checking maps of objects in the prototype chain for fast and global
  // objects or doing negative lookup for slow objects, ensures that the
  // property cells for global objects are still empty) and checks that the
  // map of the holder has not changed.  If necessary, the function also
  // generates code for security checks in the case of global object holders.
  // Helps to make sure that the current IC is still valid.
  //
  // The scratch and holder registers are always clobbered, but the object
  // register is only clobbered if it is the same as the holder register.
  // The function returns a register containing the holder - either
  // object_reg or holder_reg.
  // The function can optionally (when save_at_depth != kInvalidProtoDepth)
  // save the object at the given depth by moving it to [esp + kPointerSize].
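  // The first overload below is a convenience wrapper that forwards to the
  // full version with save_at_depth == kInvalidProtoDepth.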
  Register CheckPrototypes(Handle<JSObject> object,
                           Register object_reg,
                           Handle<JSObject> holder,
                           Register holder_reg,
                           Register scratch1,
                           Register scratch2,
                           Handle<String> name,
                           Label* miss) {
    return CheckPrototypes(object, object_reg, holder, holder_reg, scratch1,
                           scratch2, name, kInvalidProtoDepth, miss);
  }

  Register CheckPrototypes(Handle<JSObject> object,
                           Register object_reg,
                           Handle<JSObject> holder,
                           Register holder_reg,
                           Register scratch1,
                           Register scratch2,
                           Handle<String> name,
                           int save_at_depth,
                           Label* miss);

 protected:
  Handle<Code> GetCodeWithFlags(Code::Flags flags, const char* name);
  Handle<Code> GetCodeWithFlags(Code::Flags flags, Handle<String> name);

  MacroAssembler* masm() { return &masm_; }
  void set_failure(Failure* failure) { failure_ = failure; }

  void GenerateLoadField(Handle<JSObject> object,
                         Handle<JSObject> holder,
                         Register receiver,
                         Register scratch1,
                         Register scratch2,
                         Register scratch3,
                         int index,
                         Handle<String> name,
                         Label* miss);

  void GenerateLoadCallback(Handle<JSObject> object,
                            Handle<JSObject> holder,
                            Register receiver,
                            Register name_reg,
                            Register scratch1,
                            Register scratch2,
                            Register scratch3,
                            Handle<AccessorInfo> callback,
                            Handle<String> name,
                            Label* miss);

  void GenerateLoadConstant(Handle<JSObject> object,
                            Handle<JSObject> holder,
                            Register receiver,
                            Register scratch1,
                            Register scratch2,
                            Register scratch3,
                            Handle<JSFunction> value,
                            Handle<String> name,
                            Label* miss);

  void GenerateLoadInterceptor(Handle<JSObject> object,
                               Handle<JSObject> holder,
                               LookupResult* lookup,
                               Register receiver,
                               Register name_reg,
                               Register scratch1,
                               Register scratch2,
                               Register scratch3,
                               Handle<String> name,
                               Label* miss);

  static void LookupPostInterceptor(Handle<JSObject> holder,
                                    Handle<String> name,
                                    LookupResult* lookup);

  Isolate* isolate() { return isolate_; }
  Heap* heap() { return isolate()->heap(); }
  Factory* factory() { return isolate()->factory(); }

 private:
  Isolate* isolate_;
  MacroAssembler masm_;
  Failure* failure_;
};


class LoadStubCompiler: public StubCompiler {
 public:
  explicit LoadStubCompiler(Isolate* isolate) : StubCompiler(isolate) { }

  Handle<Code> CompileLoadNonexistent(Handle<String> name,
                                      Handle<JSObject> object,
                                      Handle<JSObject> last);

  Handle<Code> CompileLoadField(Handle<JSObject> object,
                                Handle<JSObject> holder,
                                int index,
                                Handle<String> name);

  Handle<Code> CompileLoadCallback(Handle<String> name,
                                   Handle<JSObject> object,
                                   Handle<JSObject> holder,
                                   Handle<AccessorInfo> callback);

  Handle<Code> CompileLoadConstant(Handle<JSObject> object,
                                   Handle<JSObject> holder,
                                   Handle<JSFunction> value,
                                   Handle<String> name);

  Handle<Code> CompileLoadInterceptor(Handle<JSObject> object,
                                      Handle<JSObject> holder,
                                      Handle<String> name);

  Handle<Code> CompileLoadGlobal(Handle<JSObject> object,
                                 Handle<GlobalObject> holder,
                                 Handle<JSGlobalPropertyCell> cell,
                                 Handle<String> name,
                                 bool is_dont_delete);

 private:
  Handle<Code> GetCode(PropertyType type, Handle<String> name);
};


class KeyedLoadStubCompiler: public StubCompiler {
 public:
  explicit KeyedLoadStubCompiler(Isolate* isolate) : StubCompiler(isolate) { }

  Handle<Code> CompileLoadField(Handle<String> name,
                                Handle<JSObject> object,
                                Handle<JSObject> holder,
                                int index);

  Handle<Code> CompileLoadCallback(Handle<String> name,
                                   Handle<JSObject> object,
                                   Handle<JSObject> holder,
                                   Handle<AccessorInfo> callback);

  Handle<Code> CompileLoadConstant(Handle<String> name,
                                   Handle<JSObject> object,
                                   Handle<JSObject> holder,
                                   Handle<JSFunction> value);

  Handle<Code> CompileLoadInterceptor(Handle<JSObject> object,
                                      Handle<JSObject> holder,
                                      Handle<String> name);

  Handle<Code> CompileLoadArrayLength(Handle<String> name);

  Handle<Code> CompileLoadStringLength(Handle<String> name);

  Handle<Code> CompileLoadFunctionPrototype(Handle<String> name);

  Handle<Code> CompileLoadElement(Handle<Map> receiver_map);

  Handle<Code> CompileLoadPolymorphic(MapHandleList* receiver_maps,
                                      CodeHandleList* handler_ics);

  static void GenerateLoadExternalArray(MacroAssembler* masm,
                                        ElementsKind elements_kind);

  static void GenerateLoadFastElement(MacroAssembler* masm);

  static void GenerateLoadFastDoubleElement(MacroAssembler* masm);

  static void GenerateLoadDictionaryElement(MacroAssembler* masm);

 private:
  Handle<Code> GetCode(PropertyType type,
                       Handle<String> name,
                       InlineCacheState state = MONOMORPHIC);
};


class StoreStubCompiler: public StubCompiler {
 public:
  StoreStubCompiler(Isolate* isolate, StrictModeFlag strict_mode)
      : StubCompiler(isolate), strict_mode_(strict_mode) { }


  Handle<Code> CompileStoreField(Handle<JSObject> object,
                                 int index,
                                 Handle<Map> transition,
                                 Handle<String> name);

  Handle<Code> CompileStoreCallback(Handle<JSObject> object,
                                    Handle<AccessorInfo> callback,
                                    Handle<String> name);

  Handle<Code> CompileStoreInterceptor(Handle<JSObject> object,
                                       Handle<String> name);

  Handle<Code> CompileStoreGlobal(Handle<GlobalObject> object,
                                  Handle<JSGlobalPropertyCell> holder,
                                  Handle<String> name);

 private:
  Handle<Code> GetCode(PropertyType type, Handle<String> name);

  StrictModeFlag strict_mode_;
};


class KeyedStoreStubCompiler: public StubCompiler {
 public:
  KeyedStoreStubCompiler(Isolate* isolate,
                         StrictModeFlag strict_mode,
                         KeyedAccessGrowMode grow_mode)
      : StubCompiler(isolate),
        strict_mode_(strict_mode),
        grow_mode_(grow_mode) { }

  Handle<Code> CompileStoreField(Handle<JSObject> object,
                                 int index,
                                 Handle<Map> transition,
                                 Handle<String> name);

  Handle<Code> CompileStoreElement(Handle<Map> receiver_map);

  Handle<Code> CompileStorePolymorphic(MapHandleList* receiver_maps,
                                       CodeHandleList* handler_stubs,
                                       MapHandleList* transitioned_maps);

  static void GenerateStoreFastElement(MacroAssembler* masm,
                                       bool is_js_array,
                                       ElementsKind element_kind,
                                       KeyedAccessGrowMode grow_mode);

  static void GenerateStoreFastDoubleElement(MacroAssembler* masm,
                                             bool is_js_array,
                                             KeyedAccessGrowMode grow_mode);

  static void GenerateStoreExternalArray(MacroAssembler* masm,
                                         ElementsKind elements_kind);

  static void GenerateStoreDictionaryElement(MacroAssembler* masm);

 private:
  Handle<Code> GetCode(PropertyType type,
                       Handle<String> name,
                       InlineCacheState state = MONOMORPHIC);

  StrictModeFlag strict_mode_;
  KeyedAccessGrowMode grow_mode_;
};


// Subset of FUNCTIONS_WITH_ID_LIST with custom constant/global call
// IC stubs.
#define CUSTOM_CALL_IC_GENERATORS(V)            \
  V(ArrayPush)                                  \
  V(ArrayPop)                                   \
  V(StringCharCodeAt)                           \
  V(StringCharAt)                               \
  V(StringFromCharCode)                         \
  V(MathFloor)                                  \
  V(MathAbs)


class CallOptimization;

class CallStubCompiler: public StubCompiler {
 public:
  CallStubCompiler(Isolate* isolate,
                   int argc,
                   Code::Kind kind,
                   Code::ExtraICState extra_state,
                   InlineCacheHolderFlag cache_holder);

  Handle<Code> CompileCallField(Handle<JSObject> object,
                                Handle<JSObject> holder,
                                int index,
                                Handle<String> name);

  Handle<Code> CompileCallConstant(Handle<Object> object,
                                   Handle<JSObject> holder,
                                   Handle<JSFunction> function,
                                   Handle<String> name,
                                   CheckType check);

  Handle<Code> CompileCallInterceptor(Handle<JSObject> object,
                                      Handle<JSObject> holder,
                                      Handle<String> name);

  Handle<Code> CompileCallGlobal(Handle<JSObject> object,
                                 Handle<GlobalObject> holder,
                                 Handle<JSGlobalPropertyCell> cell,
                                 Handle<JSFunction> function,
                                 Handle<String> name);

  static bool HasCustomCallGenerator(Handle<JSFunction> function);

 private:
  // Compiles a custom call constant/global IC.  For constant calls cell is
  // NULL.  Returns an empty handle if there is no custom call code for the
  // given function.
  Handle<Code> CompileCustomCall(Handle<Object> object,
                                 Handle<JSObject> holder,
                                 Handle<JSGlobalPropertyCell> cell,
                                 Handle<JSFunction> function,
                                 Handle<String> name);

#define DECLARE_CALL_GENERATOR(name)                                    \
  Handle<Code> Compile##name##Call(Handle<Object> object,              \
                                   Handle<JSObject> holder,            \
                                   Handle<JSGlobalPropertyCell> cell,  \
                                   Handle<JSFunction> function,        \
                                   Handle<String> fname);
  CUSTOM_CALL_IC_GENERATORS(DECLARE_CALL_GENERATOR)
#undef DECLARE_CALL_GENERATOR

  Handle<Code> CompileFastApiCall(const CallOptimization& optimization,
                                  Handle<Object> object,
                                  Handle<JSObject> holder,
                                  Handle<JSGlobalPropertyCell> cell,
                                  Handle<JSFunction> function,
                                  Handle<String> name);

  Handle<Code> GetCode(PropertyType type, Handle<String> name);
  Handle<Code> GetCode(Handle<JSFunction> function);

  const ParameterCount& arguments() { return arguments_; }

  void GenerateNameCheck(Handle<String> name, Label* miss);

  void GenerateGlobalReceiverCheck(Handle<JSObject> object,
                                   Handle<JSObject> holder,
                                   Handle<String> name,
                                   Label* miss);

  // Generates code to load the function from the cell checking that
  // it still contains the same function.
  void GenerateLoadFunctionFromCell(Handle<JSGlobalPropertyCell> cell,
                                    Handle<JSFunction> function,
                                    Label* miss);

  // Generates a jump to CallIC miss stub.
  void GenerateMissBranch();

  const ParameterCount arguments_;
  const Code::Kind kind_;
  const Code::ExtraICState extra_state_;
  const InlineCacheHolderFlag cache_holder_;
};


class ConstructStubCompiler: public StubCompiler {
 public:
  explicit ConstructStubCompiler(Isolate* isolate) : StubCompiler(isolate) { }

  Handle<Code> CompileConstructStub(Handle<JSFunction> function);

 private:
  Handle<Code> GetCode();
};


// Holds information about possible function call optimizations.
class CallOptimization BASE_EMBEDDED {
 public:
  explicit CallOptimization(LookupResult* lookup);

  explicit CallOptimization(Handle<JSFunction> function);

  bool is_constant_call() const {
    return !constant_function_.is_null();
  }

  Handle<JSFunction> constant_function() const {
    ASSERT(is_constant_call());
    return constant_function_;
  }

  bool is_simple_api_call() const {
    return is_simple_api_call_;
  }

  Handle<FunctionTemplateInfo> expected_receiver_type() const {
    ASSERT(is_simple_api_call());
    return expected_receiver_type_;
  }

  Handle<CallHandlerInfo> api_call_info() const {
    ASSERT(is_simple_api_call());
    return api_call_info_;
  }

  // Returns the depth of the object having the expected type in the
  // prototype chain between the two arguments.
  int GetPrototypeDepthOfExpectedType(Handle<JSObject> object,
                                      Handle<JSObject> holder) const;

 private:
  void Initialize(Handle<JSFunction> function);

  // Determines whether the given function can be called using the
  // fast api call builtin.
  void AnalyzePossibleApiFunction(Handle<JSFunction> function);

  Handle<JSFunction> constant_function_;
  bool is_simple_api_call_;
  Handle<FunctionTemplateInfo> expected_receiver_type_;
  Handle<CallHandlerInfo> api_call_info_;
};


} }  // namespace v8::internal

#endif  // V8_STUB_CACHE_H_