// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_STUB_CACHE_H_
#define V8_STUB_CACHE_H_

#include "macro-assembler.h"

namespace v8 {
namespace internal {


// The stub cache is used for megamorphic calls and property accesses.
// It maps (map, name, type)->Code*

// The design of the table uses the inline cache stubs used for
// mono-morphic calls.
// The beauty of this is that we do not have to invalidate the cache
// whenever a prototype map is changed.  The stub validates the map
// chain as in the mono-morphic case.

class SCTableReference;


// Two-level (primary/secondary) hash table of compiled IC stubs, keyed
// by (name, map, code flags).  All state is static; see primary_ and
// secondary_ below.
class StubCache : public AllStatic {
 public:
  // One cache slot: a property name and the stub compiled for it.
  struct Entry {
    String* key;
    Code* value;
  };


  static void Initialize(bool create_heap_objects);

  // Computes the right stub matching.  Inserts the result in the
  // cache before returning.  This might compile a stub if needed.
  static Object* ComputeLoadField(String* name,
                                  JSObject* receiver,
                                  JSObject* holder,
                                  int field_index);

  static Object* ComputeLoadCallback(String* name,
                                     JSObject* receiver,
                                     JSObject* holder,
                                     AccessorInfo* callback);

  static Object* ComputeLoadConstant(String* name,
                                     JSObject* receiver,
                                     JSObject* holder,
                                     Object* value);

  static Object* ComputeLoadInterceptor(String* name,
                                        JSObject* receiver,
                                        JSObject* holder);

  static Object* ComputeLoadNormal(String* name, JSObject* receiver);


  static Object* ComputeLoadGlobal(String* name,
                                   JSObject* receiver,
                                   GlobalObject* holder,
                                   JSGlobalPropertyCell* cell,
                                   bool is_dont_delete);


  // --- Keyed load stubs. ---

  static Object* ComputeKeyedLoadField(String* name,
                                       JSObject* receiver,
                                       JSObject* holder,
                                       int field_index);

  static Object* ComputeKeyedLoadCallback(String* name,
                                          JSObject* receiver,
                                          JSObject* holder,
                                          AccessorInfo* callback);

  static Object* ComputeKeyedLoadConstant(String* name, JSObject* receiver,
                                          JSObject* holder, Object* value);

  static Object* ComputeKeyedLoadInterceptor(String* name,
                                             JSObject* receiver,
                                             JSObject* holder);

  static Object* ComputeKeyedLoadArrayLength(String* name, JSArray* receiver);

  static Object* ComputeKeyedLoadStringLength(String* name,
                                              String* receiver);

  static Object* ComputeKeyedLoadFunctionPrototype(String* name,
                                                   JSFunction* receiver);

  // --- Store stubs. ---

  static Object* ComputeStoreField(String* name,
                                   JSObject* receiver,
                                   int field_index,
                                   Map* transition = NULL);

  static Object* ComputeStoreGlobal(String* name,
                                    GlobalObject* receiver,
                                    JSGlobalPropertyCell* cell);

  static Object* ComputeStoreCallback(String* name,
                                      JSObject* receiver,
                                      AccessorInfo* callback);

  static Object* ComputeStoreInterceptor(String* name, JSObject* receiver);

  // --- Keyed store stubs. ---

  static Object* ComputeKeyedStoreField(String* name,
                                        JSObject* receiver,
                                        int field_index,
                                        Map* transition = NULL);

  // --- Call stubs. ---

  static Object* ComputeCallField(int argc,
                                  InLoopFlag in_loop,
                                  String* name,
                                  Object* object,
                                  JSObject* holder,
                                  int index);

  static Object* ComputeCallConstant(int argc,
                                     InLoopFlag in_loop,
                                     String* name,
                                     Object* object,
                                     JSObject* holder,
                                     JSFunction* function);

  static Object* ComputeCallNormal(int argc,
                                   InLoopFlag in_loop,
                                   String* name,
                                   JSObject* receiver);

  static Object* ComputeCallInterceptor(int argc,
                                        String* name,
                                        Object* object,
                                        JSObject* holder);

  static Object* ComputeCallGlobal(int argc,
                                   InLoopFlag in_loop,
                                   String* name,
                                   JSObject* receiver,
                                   GlobalObject* holder,
                                   JSGlobalPropertyCell* cell,
                                   JSFunction* function);

  // --- Non-monomorphic (shared) call stubs. ---

  static Object* ComputeCallInitialize(int argc, InLoopFlag in_loop);
  static Object* ComputeCallPreMonomorphic(int argc, InLoopFlag in_loop);
  static Object* ComputeCallNormal(int argc, InLoopFlag in_loop);
  static Object* ComputeCallMegamorphic(int argc, InLoopFlag in_loop);
  static Object* ComputeCallMiss(int argc);

  // Finds the Code object stored in the Heap::non_monomorphic_cache().
  static Code* FindCallInitialize(int argc, InLoopFlag in_loop);

#ifdef ENABLE_DEBUGGER_SUPPORT
  static Object* ComputeCallDebugBreak(int argc);
  static Object* ComputeCallDebugPrepareStepIn(int argc);
#endif

  static Object* ComputeLazyCompile(int argc);


  // Update cache for entry hash(name, map).
  static Code* Set(String* name, Map* map, Code* code);

  // Clear the lookup table (@ mark compact collection).
  static void Clear();

  // Functions for generating stubs at startup.
  static void GenerateMiss(MacroAssembler* masm);

  // Generate code for probing the stub cache table.
  // If extra != no_reg it might be used as an extra scratch register.
  static void GenerateProbe(MacroAssembler* masm,
                            Code::Flags flags,
                            Register receiver,
                            Register name,
                            Register scratch,
                            Register extra);

  enum Table {
    kPrimary,
    kSecondary
  };

 private:
  friend class SCTableReference;
  static const int kPrimaryTableSize = 2048;
  static const int kSecondaryTableSize = 512;
  static Entry primary_[];
  static Entry secondary_[];

  // Computes the hashed offsets for primary and secondary caches.
  static int PrimaryOffset(String* name, Code::Flags flags, Map* map) {
    // This works well because the heap object tag size and the hash
    // shift are equal.  Shifting down the length field to get the
    // hash code would effectively throw away two bits of the hash
    // code.
    ASSERT(kHeapObjectTagSize == String::kHashShift);
    // Compute the hash of the name (use entire length field).
    ASSERT(name->HasHashCode());
    uint32_t field = name->length_field();
    // Using only the low bits in 64-bit mode is unlikely to increase the
    // risk of collision even if the heap is spread over an area larger than
    // 4Gb (and not at all if it isn't).
    uint32_t map_low32bits =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map));
    // We always set the in_loop bit to zero when generating the lookup code
    // so do it here too so the hash codes match.
    // NOTE(review): this masks out all of kFlagsNotUsedInLookup, whereas
    // SecondaryOffset below masks only kFlagsICInLoopMask — the generated
    // probe code must mirror each computation exactly; confirm the
    // asymmetry is intentional.
    uint32_t iflags =
        (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
    // Base the offset on a simple combination of name, flags, and map.
    uint32_t key = (map_low32bits + field) ^ iflags;
    return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
  }

  static int SecondaryOffset(String* name, Code::Flags flags, int seed) {
    // Use the seed from the primary cache in the secondary cache.
    uint32_t string_low32bits =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
    // We always set the in_loop bit to zero when generating the lookup code
    // so do it here too so the hash codes match.
    uint32_t iflags =
        (static_cast<uint32_t>(flags) & ~Code::kFlagsICInLoopMask);
    uint32_t key = seed - string_low32bits + iflags;
    return key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
  }

  // Compute the entry for a given offset in exactly the same way as
  // we do in generated code.  We generate a hash code that already
  // ends in String::kHashShift 0s.  Then we shift it so it is a multiple
  // of sizeof(Entry).  This makes it easier to avoid making mistakes
  // in the hashed offset computations.
  static Entry* entry(Entry* table, int offset) {
    const int shift_amount = kPointerSizeLog2 + 1 - String::kHashShift;
    return reinterpret_cast<Entry*>(
        reinterpret_cast<Address>(table) + (offset << shift_amount));
  }
};


// Typed wrapper around the address of a key or value slot in one of the
// stub cache tables, for use by generated probe code.
class SCTableReference {
 public:
  static SCTableReference keyReference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->key));
  }


  static SCTableReference valueReference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->value));
  }

  Address address() const { return address_; }

 private:
  explicit SCTableReference(Address address) : address_(address) {}

  // Returns the first entry of the selected table; a friend of
  // StubCache, so it may touch the private arrays directly.
  static StubCache::Entry* first_entry(StubCache::Table table) {
    switch (table) {
      case StubCache::kPrimary: return StubCache::primary_;
      case StubCache::kSecondary: return StubCache::secondary_;
    }
    UNREACHABLE();
    return NULL;
  }

  Address address_;
};

// ------------------------------------------------------------------------


// Support functions for IC stubs for callbacks.
Object* LoadCallbackProperty(Arguments args);
Object* StoreCallbackProperty(Arguments args);


// Support functions for IC stubs for interceptors.
Object* LoadPropertyWithInterceptorOnly(Arguments args);
Object* LoadPropertyWithInterceptorForLoad(Arguments args);
Object* LoadPropertyWithInterceptorForCall(Arguments args);
Object* StoreInterceptorProperty(Arguments args);
Object* CallInterceptorProperty(Arguments args);


// Support function for computing call IC miss stubs.
Handle<Code> ComputeCallMiss(int argc);


// The stub compiler compiles stubs for the stub cache.
class StubCompiler BASE_EMBEDDED {
 public:
  // Receiver checks emitted by call stubs before dispatching.
  enum CheckType {
    RECEIVER_MAP_CHECK,
    STRING_CHECK,
    NUMBER_CHECK,
    BOOLEAN_CHECK,
    JSARRAY_HAS_FAST_ELEMENTS_CHECK
  };

  // 256 is the initial buffer size handed to the macro assembler.
  StubCompiler() : scope_(), masm_(NULL, 256), failure_(NULL) { }

  // Compilers for the shared (non-monomorphic) call stubs.
  Object* CompileCallInitialize(Code::Flags flags);
  Object* CompileCallPreMonomorphic(Code::Flags flags);
  Object* CompileCallNormal(Code::Flags flags);
  Object* CompileCallMegamorphic(Code::Flags flags);
  Object* CompileCallMiss(Code::Flags flags);
#ifdef ENABLE_DEBUGGER_SUPPORT
  Object* CompileCallDebugBreak(Code::Flags flags);
  Object* CompileCallDebugPrepareStepIn(Code::Flags flags);
#endif
  Object* CompileLazyCompile(Code::Flags flags);

  // Static functions for generating parts of stubs.
  static void GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                  int index,
                                                  Register prototype);
  static void GenerateFastPropertyLoad(MacroAssembler* masm,
                                       Register dst, Register src,
                                       JSObject* holder, int index);

  static void GenerateLoadArrayLength(MacroAssembler* masm,
                                      Register receiver,
                                      Register scratch,
                                      Label* miss_label);
  static void GenerateLoadStringLength(MacroAssembler* masm,
                                       Register receiver,
                                       Register scratch,
                                       Label* miss_label);
  static void GenerateLoadStringLength2(MacroAssembler* masm,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* miss_label);
  static void GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss_label);
  static void GenerateStoreField(MacroAssembler* masm,
                                 Builtins::Name storage_extend,
                                 JSObject* object,
                                 int index,
                                 Map* transition,
                                 Register receiver_reg,
                                 Register name_reg,
                                 Register scratch,
                                 Label* miss_label);
  static void GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind);

  // Check the integrity of the prototype chain to make sure that the
  // current IC is still valid.
  Register CheckPrototypes(JSObject* object,
                           Register object_reg,
                           JSObject* holder,
                           Register holder_reg,
                           Register scratch,
                           String* name,
                           Label* miss);

 protected:
  Object* GetCodeWithFlags(Code::Flags flags, const char* name);
  Object* GetCodeWithFlags(Code::Flags flags, String* name);

  MacroAssembler* masm() { return &masm_; }
  // Records a (bailout) failure for the caller to report.
  void set_failure(Failure* failure) { failure_ = failure; }

  void GenerateLoadField(JSObject* object,
                         JSObject* holder,
                         Register receiver,
                         Register scratch1,
                         Register scratch2,
                         int index,
                         String* name,
                         Label* miss);

  void GenerateLoadCallback(JSObject* object,
                            JSObject* holder,
                            Register receiver,
                            Register name_reg,
                            Register scratch1,
                            Register scratch2,
                            AccessorInfo* callback,
                            String* name,
                            Label* miss);

  void GenerateLoadConstant(JSObject* object,
                            JSObject* holder,
                            Register receiver,
                            Register scratch1,
                            Register scratch2,
                            Object* value,
                            String* name,
                            Label* miss);

  void GenerateLoadInterceptor(JSObject* object,
                               JSObject* holder,
                               LookupResult* lookup,
                               Register receiver,
                               Register name_reg,
                               Register scratch1,
                               Register scratch2,
                               String* name,
                               Label* miss);

 private:
  HandleScope scope_;
  MacroAssembler masm_;
  Failure* failure_;
};


// Compiles monomorphic load IC stubs.
class LoadStubCompiler: public StubCompiler {
 public:
  Object* CompileLoadField(JSObject* object,
                           JSObject* holder,
                           int index,
                           String* name);
  Object* CompileLoadCallback(JSObject* object,
                              JSObject* holder,
                              AccessorInfo* callback,
                              String* name);
  Object* CompileLoadConstant(JSObject* object,
                              JSObject* holder,
                              Object* value,
                              String* name);
  Object* CompileLoadInterceptor(JSObject* object,
                                 JSObject* holder,
                                 String* name);

  Object* CompileLoadGlobal(JSObject* object,
                            GlobalObject* holder,
                            JSGlobalPropertyCell* cell,
                            String* name,
                            bool is_dont_delete);

 private:
  Object* GetCode(PropertyType type, String* name);
};


// Compiles keyed load IC stubs.
class KeyedLoadStubCompiler: public StubCompiler {
 public:
  Object* CompileLoadField(String* name,
                           JSObject* object,
                           JSObject* holder,
                           int index);
  Object* CompileLoadCallback(String* name,
                              JSObject* object,
                              JSObject* holder,
                              AccessorInfo* callback);
  Object* CompileLoadConstant(String* name,
                              JSObject* object,
                              JSObject* holder,
                              Object* value);
  Object* CompileLoadInterceptor(JSObject* object,
                                 JSObject* holder,
                                 String* name);
  Object* CompileLoadArrayLength(String* name);
  Object* CompileLoadStringLength(String* name);
  Object* CompileLoadFunctionPrototype(String* name);

 private:
  Object* GetCode(PropertyType type, String* name);
};


// Compiles monomorphic store IC stubs.
class StoreStubCompiler: public StubCompiler {
 public:
  Object* CompileStoreField(JSObject* object,
                            int index,
                            Map* transition,
                            String* name);
  // NOTE(review): parameter is named "callbacks" (plural) unlike the
  // singular "callback" used elsewhere — confirm against the definition.
  Object* CompileStoreCallback(JSObject* object,
                               AccessorInfo* callbacks,
                               String* name);
  Object* CompileStoreInterceptor(JSObject* object, String* name);
  Object* CompileStoreGlobal(GlobalObject* object,
                             JSGlobalPropertyCell* holder,
                             String* name);


 private:
  Object* GetCode(PropertyType type, String* name);
};


// Compiles keyed store IC stubs.
class KeyedStoreStubCompiler: public StubCompiler {
 public:
  Object* CompileStoreField(JSObject* object,
                            int index,
                            Map* transition,
                            String* name);

 private:
  Object* GetCode(PropertyType type, String* name);
};


// Compiles monomorphic call IC stubs for a fixed argument count and
// in-loop flag.
class CallStubCompiler: public StubCompiler {
 public:
  // NOTE(review): "explicit" is meaningless on a multi-argument
  // constructor without defaults; kept as-is to preserve the source.
  explicit CallStubCompiler(int argc, InLoopFlag in_loop)
      : arguments_(argc), in_loop_(in_loop) { }

  Object* CompileCallField(Object* object,
                           JSObject* holder,
                           int index,
                           String* name);
  Object* CompileCallConstant(Object* object,
                              JSObject* holder,
                              JSFunction* function,
                              String* name,
                              CheckType check);
  Object* CompileCallInterceptor(Object* object,
                                 JSObject* holder,
                                 String* name);
  Object* CompileCallGlobal(JSObject* object,
                            GlobalObject* holder,
                            JSGlobalPropertyCell* cell,
                            JSFunction* function,
                            String* name);

 private:
  const ParameterCount arguments_;
  const InLoopFlag in_loop_;

  const ParameterCount& arguments() { return arguments_; }

  Object* GetCode(PropertyType type, String* name);
};


// Compiles construct (new) stubs for a given shared function info.
class ConstructStubCompiler: public StubCompiler {
 public:
  explicit ConstructStubCompiler() {}

  Object* CompileConstructStub(SharedFunctionInfo* shared);

 private:
  Object* GetCode();
};


} }  // namespace v8::internal

#endif  // V8_STUB_CACHE_H_