// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_CODE_STUBS_H_
#define V8_CODE_STUBS_H_

namespace v8 {
namespace internal {

// List of code stubs used on all platforms. The order in this list is
// important, as only the stubs up to and including RecordWrite allow
// nested stub calls.
36 #define CODE_STUB_LIST_ALL_PLATFORMS(V) \ 37 V(CallFunction) \ 38 V(GenericBinaryOp) \ 39 V(StringAdd) \ 40 V(SubString) \ 41 V(StringCompare) \ 42 V(SmiOp) \ 43 V(Compare) \ 44 V(RecordWrite) \ 45 V(ConvertToDouble) \ 46 V(WriteInt32ToHeapNumber) \ 47 V(StackCheck) \ 48 V(FastNewClosure) \ 49 V(FastNewContext) \ 50 V(FastCloneShallowArray) \ 51 V(TranscendentalCache) \ 52 V(GenericUnaryOp) \ 53 V(RevertToNumber) \ 54 V(ToBoolean) \ 55 V(Instanceof) \ 56 V(CounterOp) \ 57 V(ArgumentsAccess) \ 58 V(RegExpExec) \ 59 V(NumberToString) \ 60 V(CEntry) \ 61 V(JSEntry) \ 62 V(DebuggerStatement) 63 64 // List of code stubs only used on ARM platforms. 65 #ifdef V8_TARGET_ARCH_ARM 66 #define CODE_STUB_LIST_ARM(V) \ 67 V(GetProperty) \ 68 V(SetProperty) \ 69 V(InvokeBuiltin) \ 70 V(RegExpCEntry) 71 #else 72 #define CODE_STUB_LIST_ARM(V) 73 #endif 74 75 // Combined list of code stubs. 76 #define CODE_STUB_LIST(V) \ 77 CODE_STUB_LIST_ALL_PLATFORMS(V) \ 78 CODE_STUB_LIST_ARM(V) 79 80 // Stub is base classes of all stubs. 81 class CodeStub BASE_EMBEDDED { 82 public: 83 enum Major { 84 #define DEF_ENUM(name) name, 85 CODE_STUB_LIST(DEF_ENUM) 86 #undef DEF_ENUM 87 NoCache, // marker for stubs that do custom caching 88 NUMBER_OF_IDS 89 }; 90 91 // Retrieve the code for the stub. Generate the code if needed. 92 Handle<Code> GetCode(); 93 94 // Retrieve the code for the stub if already generated. Do not 95 // generate the code if not already generated and instead return a 96 // retry after GC Failure object. 
97 Object* TryGetCode(); 98 MajorKeyFromKey(uint32_t key)99 static Major MajorKeyFromKey(uint32_t key) { 100 return static_cast<Major>(MajorKeyBits::decode(key)); 101 }; MinorKeyFromKey(uint32_t key)102 static int MinorKeyFromKey(uint32_t key) { 103 return MinorKeyBits::decode(key); 104 }; 105 static const char* MajorName(Major major_key, bool allow_unknown_keys); 106 ~CodeStub()107 virtual ~CodeStub() {} 108 109 // Override these methods to provide a custom caching mechanism for 110 // an individual type of code stub. GetCustomCache(Code ** code_out)111 virtual bool GetCustomCache(Code** code_out) { return false; } SetCustomCache(Code * value)112 virtual void SetCustomCache(Code* value) { } has_custom_cache()113 virtual bool has_custom_cache() { return false; } 114 115 protected: 116 static const int kMajorBits = 5; 117 static const int kMinorBits = kBitsPerInt - kSmiTagSize - kMajorBits; 118 119 private: 120 // Lookup the code in the (possibly custom) cache. 121 bool FindCodeInCache(Code** code_out); 122 123 // Nonvirtual wrapper around the stub-specific Generate function. Call 124 // this function to set up the macro assembler and generate the code. 125 void GenerateCode(MacroAssembler* masm); 126 127 // Generates the assembler code for the stub. 128 virtual void Generate(MacroAssembler* masm) = 0; 129 130 // Perform bookkeeping required after code generation when stub code is 131 // initially generated. 132 void RecordCodeGeneration(Code* code, MacroAssembler* masm); 133 134 // Returns information for computing the number key. 135 virtual Major MajorKey() = 0; 136 virtual int MinorKey() = 0; 137 138 // The CallFunctionStub needs to override this so it can encode whether a 139 // lazily generated function should be fully optimized or not. InLoop()140 virtual InLoopFlag InLoop() { return NOT_IN_LOOP; } 141 142 // Returns a name for logging/debugging purposes. 
GetName()143 virtual const char* GetName() { return MajorName(MajorKey(), false); } 144 145 #ifdef DEBUG Print()146 virtual void Print() { PrintF("%s\n", GetName()); } 147 #endif 148 149 // Computes the key based on major and minor. GetKey()150 uint32_t GetKey() { 151 ASSERT(static_cast<int>(MajorKey()) < NUMBER_OF_IDS); 152 return MinorKeyBits::encode(MinorKey()) | 153 MajorKeyBits::encode(MajorKey()); 154 } 155 AllowsStubCalls()156 bool AllowsStubCalls() { return MajorKey() <= RecordWrite; } 157 158 class MajorKeyBits: public BitField<uint32_t, 0, kMajorBits> {}; 159 class MinorKeyBits: public BitField<uint32_t, kMajorBits, kMinorBits> {}; 160 161 friend class BreakPointIterator; 162 }; 163 164 } } // namespace v8::internal 165 166 #endif // V8_CODE_STUBS_H_ 167