/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JIT_h
#define JIT_h

#if ENABLE(JIT)

// We've run into some problems where changing the size of the class JIT leads to
// performance fluctuations. Try forcing alignment in an attempt to stabilize this.
#if COMPILER(GCC)
#define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
#else
#define JIT_CLASS_ALIGNMENT
#endif

#define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));

#include "CodeBlock.h"
#include "Interpreter.h"
#include "JSInterfaceJIT.h"
#include "Opcode.h"
#include "Profiler.h"
#include <bytecode/SamplingTool.h>

namespace JSC {

    class CodeBlock;
    class JIT;
    class JSPropertyNameIterator;
    class Interpreter;
    class Register;
    class RegisterFile;
    class ScopeChainNode;
    class StructureChain;

    struct CallLinkInfo;
    struct Instruction;
    struct OperandTypes;
    struct PolymorphicAccessStructureList;
    struct SimpleJumpTable;
    struct StringJumpTable;
    struct StructureStubInfo;

    struct CallRecord {
        MacroAssembler::Call from;
        unsigned bytecodeOffset;
        void* to;

        CallRecord()
        {
        }

        CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
            : from(from)
            , bytecodeOffset(bytecodeOffset)
            , to(to)
        {
        }
    };

    struct JumpTable {
        MacroAssembler::Jump from;
        unsigned toBytecodeOffset;

        JumpTable(MacroAssembler::Jump f, unsigned t)
            : from(f)
            , toBytecodeOffset(t)
        {
        }
    };

    struct SlowCaseEntry {
        MacroAssembler::Jump from;
        unsigned to;
        unsigned hint;

        SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
            : from(f)
            , to(t)
            , hint(h)
        {
        }
    };
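    // Note: a SwitchRecord remembers the jump table emitted for an op_switch_imm,
    // op_switch_char or op_switch_string, so the link pass can resolve the bytecode
    // targets (and the default target) once all labels are known.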
    struct SwitchRecord {
        enum Type {
            Immediate,
            Character,
            String
        };

        Type type;

        union {
            SimpleJumpTable* simpleJumpTable;
            StringJumpTable* stringJumpTable;
        } jumpTable;

        unsigned bytecodeOffset;
        unsigned defaultOffset;

        SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
            : type(type)
            , bytecodeOffset(bytecodeOffset)
            , defaultOffset(defaultOffset)
        {
            this->jumpTable.simpleJumpTable = jumpTable;
        }

        SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
            : type(String)
            , bytecodeOffset(bytecodeOffset)
            , defaultOffset(defaultOffset)
        {
            this->jumpTable.stringJumpTable = jumpTable;
        }
    };

    struct PropertyStubCompilationInfo {
        MacroAssembler::Call callReturnLocation;
        MacroAssembler::Label hotPathBegin;
    };

    struct StructureStubCompilationInfo {
        MacroAssembler::DataLabelPtr hotPathBegin;
        MacroAssembler::Call hotPathOther;
        MacroAssembler::Call callReturnLocation;
    };

    struct MethodCallCompilationInfo {
        MethodCallCompilationInfo(unsigned propertyAccessIndex)
            : propertyAccessIndex(propertyAccessIndex)
        {
        }

        MacroAssembler::DataLabelPtr structureToCompare;
        unsigned propertyAccessIndex;
    };

    // Near calls can only be patched to other JIT code; regular calls can be patched to JIT code or relinked to stub functions.
    void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
    void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
    void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);

    class JIT : private JSInterfaceJIT {
        friend class JITStubCall;

        using MacroAssembler::Jump;
        using MacroAssembler::JumpList;
        using MacroAssembler::Label;

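        // The two constants below are placeholders: the get_by_id / put_by_id fast paths
        // are first generated against them, and the real Structure pointer and property
        // offset are patched in later (see the ctiPatch* functions above and the patch*
        // methods further down).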
        static const int patchGetByIdDefaultStructure = -1;
        // Magic number - the initial offset must not be representable as a signed 8-bit value,
        // or the X86Assembler will compress the displacement and we may not be able to fit a
        // patched offset.
        static const int patchGetByIdDefaultOffset = 256;

    public:
        static JITCode compile(JSGlobalData* globalData, CodeBlock* codeBlock, CodePtr* functionEntryArityCheck = 0, void* offsetBase = 0)
        {
            return JIT(globalData, codeBlock, offsetBase).privateCompile(functionEntryArityCheck);
        }

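        // The compileGetById*/compilePutById* helpers below each construct a short-lived JIT
        // for the given CodeBlock and generate one flavour of property-access stub (self list,
        // prototype, prototype list, or prototype chain), hooking the result up to the
        // supplied StructureStubInfo.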
        static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
        }

        static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
        }

        static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
        }

        static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
        }

        static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
        }

        static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
        }

        static void compileCTIMachineTrampolines(JSGlobalData* globalData, RefPtr<ExecutablePool>* executablePool, TrampolineStructure *trampolines)
        {
            if (!globalData->canUseJIT())
                return;
            JIT jit(globalData, 0, 0);
            jit.privateCompileCTIMachineTrampolines(executablePool, globalData, trampolines);
        }

        static CodePtr compileCTINativeCall(JSGlobalData* globalData, PassRefPtr<ExecutablePool> executablePool, NativeFunction func)
        {
            if (!globalData->canUseJIT())
                return CodePtr();
            JIT jit(globalData, 0, 0);
            return jit.privateCompileCTINativeCall(executablePool, globalData, func);
        }

        static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
        static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
        static void patchMethodCallProto(JSGlobalData&, CodeBlock* codeblock, MethodCallLinkInfo&, JSFunction*, Structure*, JSObject*, ReturnAddressPtr);

        static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
        {
            JIT jit(globalData, codeBlock);
            return jit.privateCompilePatchGetArrayLength(returnAddress);
        }

        static void linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, int callerArgCount, JSGlobalData*);
        static void linkConstruct(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, int callerArgCount, JSGlobalData*);

    private:
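        // Recorded for each op_jsr site: the link pass patches 'storeLocation' with the
        // address of 'target' (the point to return to), which the matching op_sret later
        // jumps through. See m_jsrSites below.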
        struct JSRInfo {
            DataLabelPtr storeLocation;
            Label target;

            JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
                : storeLocation(storeLocation)
                , target(targetLocation)
            {
            }
        };

        JIT(JSGlobalData*, CodeBlock* = 0, void* = 0);

        void privateCompileMainPass();
        void privateCompileLinkPass();
        void privateCompileSlowCases();
        JITCode privateCompile(CodePtr* functionEntryArityCheck);
        void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
        void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, size_t cachedOffset);
        void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
        void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
        void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
        void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress, bool direct);

        void privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* data, TrampolineStructure *trampolines);
        Label privateCompileCTINativeCall(JSGlobalData*, bool isConstruct = false);
        CodePtr privateCompileCTINativeCall(PassRefPtr<ExecutablePool> executablePool, JSGlobalData* data, NativeFunction func);
        void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);

        void addSlowCase(Jump);
        void addSlowCase(JumpList);
        void addJump(Jump, int);
        void emitJumpSlowToHot(Jump, int);

        void compileOpCall(OpcodeID, Instruction* instruction, unsigned callLinkInfoIndex);
        void compileOpCallVarargs(Instruction* instruction);
        void compileOpCallInitializeCallFrame();
        void compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID);
        void compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter);

        enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
        void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
        bool isOperandConstantImmediateDouble(unsigned src);

        void emitLoadDouble(unsigned index, FPRegisterID value);
        void emitLoadInt32ToDouble(unsigned index, FPRegisterID value);

        void testPrototype(JSValue, JumpList& failureCases);

#if USE(JSVALUE32_64)
        bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);

        void emitLoadTag(unsigned index, RegisterID tag);
        void emitLoadPayload(unsigned index, RegisterID payload);

        void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
        void emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
        void emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2);

        void emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
        void emitStore(unsigned index, const JSValue constant, RegisterID base = callFrameRegister);
        void emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32 = false);
        void emitStoreInt32(unsigned index, TrustedImm32 payload, bool indexIsInt32 = false);
        void emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell = false);
        void emitStoreBool(unsigned index, RegisterID payload, bool indexIsBool = false);
        void emitStoreDouble(unsigned index, FPRegisterID value);

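        // map()/unmap() maintain a small cache (see the m_mapped* members below) of which
        // machine registers currently hold a virtual register's tag and payload, so that
        // consecutive bytecodes touching the same operand can skip redundant loads; anything
        // that clobbers those registers, or a jump target, must unmap.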
        bool isLabeled(unsigned bytecodeOffset);
        void map(unsigned bytecodeOffset, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload);
        void unmap(RegisterID);
        void unmap();
        bool isMapped(unsigned virtualRegisterIndex);
        bool getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload);
        bool getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag);

        void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex);
        void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag);
        void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, unsigned virtualRegisterIndex);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
        void compileGetByIdHotPath();
        void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
#endif
        void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, Structure* structure, size_t cachedOffset);
        void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
        void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset);
        void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, Structure* structure, size_t cachedOffset);

        // Arithmetic opcode helpers
        void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
        void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
        void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);

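        // The patchOffset* constants below are hand-maintained byte distances from the start
        // of each patchable instruction sequence to the fields that later get repatched
        // (structure pointers, property offsets, branch and call targets). They necessarily
        // differ per architecture and per value representation, and are checked against the
        // actually emitted code with ASSERT_JIT_OFFSET.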
#if CPU(X86)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 7;
        static const int patchOffsetPutByIdPropertyMapOffset1 = 22;
        static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 7;
        static const int patchOffsetGetByIdBranchToSlowCase = 13;
        static const int patchOffsetGetByIdPropertyMapOffset1 = 22;
        static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
        static const int patchOffsetGetByIdPutResult = 28;
#if ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 37;
#else
        static const int patchOffsetGetByIdSlowCaseCall = 27;
#endif
        static const int patchOffsetOpCallCompareToJump = 6;

        static const int patchOffsetMethodCheckProtoObj = 11;
        static const int patchOffsetMethodCheckProtoStruct = 18;
        static const int patchOffsetMethodCheckPutFunction = 29;
#elif CPU(ARM_TRADITIONAL)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 4;
        static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
        static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 4;
        static const int patchOffsetGetByIdBranchToSlowCase = 16;
        static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
        static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
        static const int patchOffsetGetByIdPutResult = 36;
#if ENABLE(OPCODE_SAMPLING)
#error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 32;
#endif
        static const int patchOffsetOpCallCompareToJump = 12;

        static const int patchOffsetMethodCheckProtoObj = 12;
        static const int patchOffsetMethodCheckProtoStruct = 20;
        static const int patchOffsetMethodCheckPutFunction = 32;

        // sequenceOpCall
        static const int sequenceOpCallInstructionSpace = 12;
        static const int sequenceOpCallConstantSpace = 2;
        // sequenceMethodCheck
        static const int sequenceMethodCheckInstructionSpace = 40;
        static const int sequenceMethodCheckConstantSpace = 6;
        // sequenceGetByIdHotPath
        static const int sequenceGetByIdHotPathInstructionSpace = 36;
        static const int sequenceGetByIdHotPathConstantSpace = 4;
        // sequenceGetByIdSlowCase
        static const int sequenceGetByIdSlowCaseInstructionSpace = 56;
        static const int sequenceGetByIdSlowCaseConstantSpace = 2;
        // sequencePutById
        static const int sequencePutByIdInstructionSpace = 36;
        static const int sequencePutByIdConstantSpace = 4;
#elif CPU(ARM_THUMB2)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 10;
        static const int patchOffsetPutByIdPropertyMapOffset1 = 36;
        static const int patchOffsetPutByIdPropertyMapOffset2 = 48;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 10;
        static const int patchOffsetGetByIdBranchToSlowCase = 26;
        static const int patchOffsetGetByIdPropertyMapOffset1 = 36;
        static const int patchOffsetGetByIdPropertyMapOffset2 = 48;
        static const int patchOffsetGetByIdPutResult = 52;
#if ENABLE(OPCODE_SAMPLING)
#error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 30;
#endif
        static const int patchOffsetOpCallCompareToJump = 16;

        static const int patchOffsetMethodCheckProtoObj = 24;
        static const int patchOffsetMethodCheckProtoStruct = 34;
        static const int patchOffsetMethodCheckPutFunction = 58;

        // sequenceOpCall
        static const int sequenceOpCallInstructionSpace = 12;
        static const int sequenceOpCallConstantSpace = 2;
        // sequenceMethodCheck
        static const int sequenceMethodCheckInstructionSpace = 40;
        static const int sequenceMethodCheckConstantSpace = 6;
        // sequenceGetByIdHotPath
        static const int sequenceGetByIdHotPathInstructionSpace = 36;
        static const int sequenceGetByIdHotPathConstantSpace = 4;
        // sequenceGetByIdSlowCase
        static const int sequenceGetByIdSlowCaseInstructionSpace = 40;
        static const int sequenceGetByIdSlowCaseConstantSpace = 2;
        // sequencePutById
        static const int sequencePutByIdInstructionSpace = 36;
        static const int sequencePutByIdConstantSpace = 4;
#elif CPU(MIPS)
#if WTF_MIPS_ISA(1)
        static const int patchOffsetPutByIdStructure = 16;
        static const int patchOffsetPutByIdPropertyMapOffset1 = 56;
        static const int patchOffsetPutByIdPropertyMapOffset2 = 72;
        static const int patchOffsetGetByIdStructure = 16;
        static const int patchOffsetGetByIdBranchToSlowCase = 48;
        static const int patchOffsetGetByIdPropertyMapOffset1 = 56;
        static const int patchOffsetGetByIdPropertyMapOffset2 = 76;
        static const int patchOffsetGetByIdPutResult = 96;
#if ENABLE(OPCODE_SAMPLING)
#error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 44;
#endif
        static const int patchOffsetOpCallCompareToJump = 32;
        static const int patchOffsetMethodCheckProtoObj = 32;
        static const int patchOffsetMethodCheckProtoStruct = 56;
        static const int patchOffsetMethodCheckPutFunction = 88;
#else // WTF_MIPS_ISA(1)
        static const int patchOffsetPutByIdStructure = 12;
        static const int patchOffsetPutByIdPropertyMapOffset1 = 48;
        static const int patchOffsetPutByIdPropertyMapOffset2 = 64;
        static const int patchOffsetGetByIdStructure = 12;
        static const int patchOffsetGetByIdBranchToSlowCase = 44;
        static const int patchOffsetGetByIdPropertyMapOffset1 = 48;
        static const int patchOffsetGetByIdPropertyMapOffset2 = 64;
        static const int patchOffsetGetByIdPutResult = 80;
#if ENABLE(OPCODE_SAMPLING)
#error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 44;
#endif
        static const int patchOffsetOpCallCompareToJump = 32;
        static const int patchOffsetMethodCheckProtoObj = 32;
        static const int patchOffsetMethodCheckProtoStruct = 52;
        static const int patchOffsetMethodCheckPutFunction = 84;
#endif
#elif CPU(SH4)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetGetByIdStructure = 6;
        static const int patchOffsetPutByIdPropertyMapOffset = 24;
        static const int patchOffsetPutByIdStructure = 6;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdBranchToSlowCase = 10;
        static const int patchOffsetGetByIdPropertyMapOffset = 24;
        static const int patchOffsetGetByIdPutResult = 32;

        // sequenceOpCall
        static const int sequenceOpCallInstructionSpace = 12;
        static const int sequenceOpCallConstantSpace = 2;
        // sequenceMethodCheck
        static const int sequenceMethodCheckInstructionSpace = 40;
        static const int sequenceMethodCheckConstantSpace = 6;
        // sequenceGetByIdHotPath
        static const int sequenceGetByIdHotPathInstructionSpace = 36;
        static const int sequenceGetByIdHotPathConstantSpace = 5;
        // sequenceGetByIdSlowCase
        static const int sequenceGetByIdSlowCaseInstructionSpace = 26;
        static const int sequenceGetByIdSlowCaseConstantSpace = 2;
        // sequencePutById
        static const int sequencePutByIdInstructionSpace = 36;
        static const int sequencePutByIdConstantSpace = 5;

        static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
        static const int patchOffsetGetByIdPropertyMapOffset2 = 26;

        static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
        static const int patchOffsetPutByIdPropertyMapOffset2 = 26;

#if ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 0; // FIXME
#else
        static const int patchOffsetGetByIdSlowCaseCall = 22;
#endif
        static const int patchOffsetOpCallCompareToJump = 4;

        static const int patchOffsetMethodCheckProtoObj = 12;
        static const int patchOffsetMethodCheckProtoStruct = 20;
        static const int patchOffsetMethodCheckPutFunction = 32;
#else
#error "JSVALUE32_64 not supported on this platform."
#endif

#else // USE(JSVALUE32_64)
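        // In the remaining configurations (notably USE(JSVALUE64)) a JSValue is encoded in a
        // single machine register, so the helpers below operate on whole value registers
        // rather than separate tag/payload pairs.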
        void emitGetVirtualRegister(int src, RegisterID dst);
        void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
        void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);

        int32_t getConstantOperandImmediateInt(unsigned src);

        void killLastResultRegister();

        Jump emitJumpIfJSCell(RegisterID);
        Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
        void emitJumpSlowCaseIfJSCell(RegisterID);
        Jump emitJumpIfNotJSCell(RegisterID);
        void emitJumpSlowCaseIfNotJSCell(RegisterID);
        void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
#if USE(JSVALUE32_64)
        JIT::Jump emitJumpIfImmediateNumber(RegisterID reg)
        {
            return emitJumpIfImmediateInteger(reg);
        }

        JIT::Jump emitJumpIfNotImmediateNumber(RegisterID reg)
        {
            return emitJumpIfNotImmediateInteger(reg);
        }
#endif
        JIT::Jump emitJumpIfImmediateInteger(RegisterID);
        JIT::Jump emitJumpIfNotImmediateInteger(RegisterID);
        JIT::Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
        void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
        void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
        void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);

#if USE(JSVALUE32_64)
        void emitFastArithDeTagImmediate(RegisterID);
        Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
#endif
        void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
        void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);

        void emitTagAsBoolImmediate(RegisterID reg);
        void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
#if USE(JSVALUE64)
        void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
#else
        void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes);
#endif

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
        void compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier* ident, unsigned propertyAccessInstructionIndex);
        void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
#endif
        void compileGetDirectOffset(RegisterID base, RegisterID result, Structure* structure, size_t cachedOffset);
        void compileGetDirectOffset(JSObject* base, RegisterID result, size_t cachedOffset);
        void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch);
        void compilePutDirectOffset(RegisterID base, RegisterID value, Structure* structure, size_t cachedOffset);

#if CPU(X86_64)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 10;
        static const int patchOffsetPutByIdPropertyMapOffset = 31;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 10;
        static const int patchOffsetGetByIdBranchToSlowCase = 20;
        static const int patchOffsetGetByIdPropertyMapOffset = 31;
        static const int patchOffsetGetByIdPutResult = 31;
#if ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 64;
#else
        static const int patchOffsetGetByIdSlowCaseCall = 41;
#endif
        static const int patchOffsetOpCallCompareToJump = 9;

        static const int patchOffsetMethodCheckProtoObj = 20;
        static const int patchOffsetMethodCheckProtoStruct = 30;
        static const int patchOffsetMethodCheckPutFunction = 50;
#elif CPU(X86)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 7;
        static const int patchOffsetPutByIdPropertyMapOffset = 22;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 7;
        static const int patchOffsetGetByIdBranchToSlowCase = 13;
        static const int patchOffsetGetByIdPropertyMapOffset = 22;
        static const int patchOffsetGetByIdPutResult = 22;
#if ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 33;
#else
        static const int patchOffsetGetByIdSlowCaseCall = 23;
#endif
        static const int patchOffsetOpCallCompareToJump = 6;

        static const int patchOffsetMethodCheckProtoObj = 11;
        static const int patchOffsetMethodCheckProtoStruct = 18;
        static const int patchOffsetMethodCheckPutFunction = 29;
#elif CPU(ARM_THUMB2)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 10;
        static const int patchOffsetPutByIdPropertyMapOffset = 46;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 10;
        static const int patchOffsetGetByIdBranchToSlowCase = 26;
        static const int patchOffsetGetByIdPropertyMapOffset = 46;
        static const int patchOffsetGetByIdPutResult = 50;
#if ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 0; // FIXME
#else
        static const int patchOffsetGetByIdSlowCaseCall = 28;
#endif
        static const int patchOffsetOpCallCompareToJump = 16;

        static const int patchOffsetMethodCheckProtoObj = 24;
        static const int patchOffsetMethodCheckProtoStruct = 34;
        static const int patchOffsetMethodCheckPutFunction = 58;
#elif CPU(ARM_TRADITIONAL)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 4;
        static const int patchOffsetPutByIdPropertyMapOffset = 20;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 4;
        static const int patchOffsetGetByIdBranchToSlowCase = 16;
        static const int patchOffsetGetByIdPropertyMapOffset = 20;
        static const int patchOffsetGetByIdPutResult = 28;
#if ENABLE(OPCODE_SAMPLING)
#error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 28;
#endif
        static const int patchOffsetOpCallCompareToJump = 12;

        static const int patchOffsetMethodCheckProtoObj = 12;
        static const int patchOffsetMethodCheckProtoStruct = 20;
        static const int patchOffsetMethodCheckPutFunction = 32;

        // sequenceOpCall
        static const int sequenceOpCallInstructionSpace = 12;
        static const int sequenceOpCallConstantSpace = 2;
        // sequenceMethodCheck
        static const int sequenceMethodCheckInstructionSpace = 40;
        static const int sequenceMethodCheckConstantSpace = 6;
        // sequenceGetByIdHotPath
        static const int sequenceGetByIdHotPathInstructionSpace = 28;
        static const int sequenceGetByIdHotPathConstantSpace = 3;
        // sequenceGetByIdSlowCase
        static const int sequenceGetByIdSlowCaseInstructionSpace = 32;
        static const int sequenceGetByIdSlowCaseConstantSpace = 2;
        // sequencePutById
        static const int sequencePutByIdInstructionSpace = 28;
        static const int sequencePutByIdConstantSpace = 3;
#elif CPU(MIPS)
#if WTF_MIPS_ISA(1)
        static const int patchOffsetPutByIdStructure = 16;
        static const int patchOffsetPutByIdPropertyMapOffset = 68;
        static const int patchOffsetGetByIdStructure = 16;
        static const int patchOffsetGetByIdBranchToSlowCase = 48;
        static const int patchOffsetGetByIdPropertyMapOffset = 68;
        static const int patchOffsetGetByIdPutResult = 88;
#if ENABLE(OPCODE_SAMPLING)
#error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 40;
#endif
        static const int patchOffsetOpCallCompareToJump = 32;
        static const int patchOffsetMethodCheckProtoObj = 32;
        static const int patchOffsetMethodCheckProtoStruct = 56;
        static const int patchOffsetMethodCheckPutFunction = 88;
#else // WTF_MIPS_ISA(1)
        static const int patchOffsetPutByIdStructure = 12;
        static const int patchOffsetPutByIdPropertyMapOffset = 60;
        static const int patchOffsetGetByIdStructure = 12;
        static const int patchOffsetGetByIdBranchToSlowCase = 44;
        static const int patchOffsetGetByIdPropertyMapOffset = 60;
        static const int patchOffsetGetByIdPutResult = 76;
#if ENABLE(OPCODE_SAMPLING)
#error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 40;
#endif
        static const int patchOffsetOpCallCompareToJump = 32;
        static const int patchOffsetMethodCheckProtoObj = 32;
        static const int patchOffsetMethodCheckProtoStruct = 52;
        static const int patchOffsetMethodCheckPutFunction = 84;
#endif
#endif
#endif // USE(JSVALUE32_64)

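        // On targets whose assembler maintains a constant pool (for example ARM traditional
        // and SH4), an inconveniently timed pool flush would change the length of the emitted
        // code and invalidate the patch offsets above. The macros below reserve the stated
        // instruction and constant space up front so each patchable sequence is emitted
        // contiguously.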
#if (defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL)
#define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace); } while (false)
#define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace, dst); } while (false)
#define END_UNINTERRUPTED_SEQUENCE(name) END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, 0)

        void beginUninterruptedSequence(int, int);
        void endUninterruptedSequence(int, int, int);

#else
#define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(); } while (false)
#define END_UNINTERRUPTED_SEQUENCE(name) do { endUninterruptedSequence(); } while (false)
#define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(); } while (false)
#endif

        void emit_op_add(Instruction*);
        void emit_op_bitand(Instruction*);
        void emit_op_bitnot(Instruction*);
        void emit_op_bitor(Instruction*);
        void emit_op_bitxor(Instruction*);
        void emit_op_call(Instruction*);
        void emit_op_call_eval(Instruction*);
        void emit_op_call_varargs(Instruction*);
        void emit_op_call_put_result(Instruction*);
        void emit_op_catch(Instruction*);
        void emit_op_construct(Instruction*);
        void emit_op_get_callee(Instruction*);
        void emit_op_create_this(Instruction*);
        void emit_op_convert_this(Instruction*);
        void emit_op_convert_this_strict(Instruction*);
        void emit_op_create_arguments(Instruction*);
        void emit_op_debug(Instruction*);
        void emit_op_del_by_id(Instruction*);
        void emit_op_div(Instruction*);
        void emit_op_end(Instruction*);
        void emit_op_enter(Instruction*);
        void emit_op_create_activation(Instruction*);
        void emit_op_eq(Instruction*);
        void emit_op_eq_null(Instruction*);
        void emit_op_get_by_id(Instruction*);
        void emit_op_get_arguments_length(Instruction*);
        void emit_op_get_by_val(Instruction*);
        void emit_op_get_argument_by_val(Instruction*);
        void emit_op_get_by_pname(Instruction*);
        void emit_op_get_global_var(Instruction*);
        void emit_op_get_scoped_var(Instruction*);
        void emit_op_init_lazy_reg(Instruction*);
        void emit_op_check_has_instance(Instruction*);
        void emit_op_instanceof(Instruction*);
        void emit_op_jeq_null(Instruction*);
        void emit_op_jfalse(Instruction*);
        void emit_op_jmp(Instruction*);
        void emit_op_jmp_scopes(Instruction*);
        void emit_op_jneq_null(Instruction*);
        void emit_op_jneq_ptr(Instruction*);
        void emit_op_jnless(Instruction*);
        void emit_op_jless(Instruction*);
        void emit_op_jlesseq(Instruction*, bool invert = false);
        void emit_op_jnlesseq(Instruction*);
        void emit_op_jsr(Instruction*);
        void emit_op_jtrue(Instruction*);
        void emit_op_load_varargs(Instruction*);
        void emit_op_loop(Instruction*);
        void emit_op_loop_if_less(Instruction*);
        void emit_op_loop_if_lesseq(Instruction*);
        void emit_op_loop_if_true(Instruction*);
        void emit_op_loop_if_false(Instruction*);
        void emit_op_lshift(Instruction*);
        void emit_op_method_check(Instruction*);
        void emit_op_mod(Instruction*);
        void emit_op_mov(Instruction*);
        void emit_op_mul(Instruction*);
        void emit_op_negate(Instruction*);
        void emit_op_neq(Instruction*);
        void emit_op_neq_null(Instruction*);
        void emit_op_new_array(Instruction*);
        void emit_op_new_func(Instruction*);
        void emit_op_new_func_exp(Instruction*);
        void emit_op_new_object(Instruction*);
        void emit_op_new_regexp(Instruction*);
        void emit_op_get_pnames(Instruction*);
        void emit_op_next_pname(Instruction*);
        void emit_op_not(Instruction*);
        void emit_op_nstricteq(Instruction*);
        void emit_op_pop_scope(Instruction*);
        void emit_op_post_dec(Instruction*);
        void emit_op_post_inc(Instruction*);
        void emit_op_pre_dec(Instruction*);
        void emit_op_pre_inc(Instruction*);
        void emit_op_profile_did_call(Instruction*);
        void emit_op_profile_will_call(Instruction*);
        void emit_op_push_new_scope(Instruction*);
        void emit_op_push_scope(Instruction*);
        void emit_op_put_by_id(Instruction*);
        void emit_op_put_by_index(Instruction*);
        void emit_op_put_by_val(Instruction*);
        void emit_op_put_getter(Instruction*);
        void emit_op_put_global_var(Instruction*);
        void emit_op_put_scoped_var(Instruction*);
        void emit_op_put_setter(Instruction*);
        void emit_op_resolve(Instruction*);
        void emit_op_resolve_base(Instruction*);
        void emit_op_ensure_property_exists(Instruction*);
        void emit_op_resolve_global(Instruction*, bool dynamic = false);
        void emit_op_resolve_global_dynamic(Instruction*);
        void emit_op_resolve_skip(Instruction*);
        void emit_op_resolve_with_base(Instruction*);
        void emit_op_ret(Instruction*);
        void emit_op_ret_object_or_this(Instruction*);
        void emit_op_rshift(Instruction*);
        void emit_op_sret(Instruction*);
        void emit_op_strcat(Instruction*);
        void emit_op_stricteq(Instruction*);
        void emit_op_sub(Instruction*);
        void emit_op_switch_char(Instruction*);
        void emit_op_switch_imm(Instruction*);
        void emit_op_switch_string(Instruction*);
        void emit_op_tear_off_activation(Instruction*);
        void emit_op_tear_off_arguments(Instruction*);
        void emit_op_throw(Instruction*);
        void emit_op_throw_reference_error(Instruction*);
        void emit_op_to_jsnumber(Instruction*);
        void emit_op_to_primitive(Instruction*);
        void emit_op_unexpected_load(Instruction*);
        void emit_op_urshift(Instruction*);
#if ENABLE(JIT_USE_SOFT_MODULO)
        void softModulo();
#endif

        void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitnot(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_convert_this_strict(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_check_has_instance(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&, bool invert = false);
        void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_load_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_false(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_resolve_global_dynamic(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);

        void emitRightShift(Instruction*, bool isUnsigned);
        void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);

        // This function is deprecated.
        void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);

        void emitInitRegister(unsigned dst);

        void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
        void emitPutCellToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
        void emitPutIntToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
        void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
        void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
        void emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);

        JSValue getConstantOperand(unsigned src);
        bool isOperandConstantImmediateInt(unsigned src);
        bool isOperandConstantImmediateChar(unsigned src);

        Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
        {
            return iter++->from;
        }
        void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
        {
            iter->from.link(this);
            ++iter;
        }
        void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int vReg);

        Jump checkStructure(RegisterID reg, Structure* structure);

        void restoreArgumentReference();
        void restoreArgumentReferenceForTrampoline();

        Call emitNakedCall(CodePtr function = CodePtr());

        void preserveReturnAddressAfterCall(RegisterID);
        void restoreReturnAddressBeforeReturn(RegisterID);
        void restoreReturnAddressBeforeReturn(Address);

        // Loads the character value of a single character string into dst.
        void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);

        void emitTimeoutCheck();
#ifndef NDEBUG
        void printBytecodeOperandTypes(unsigned src1, unsigned src2);
#endif

#if ENABLE(SAMPLING_FLAGS)
        void setSamplingFlag(int32_t);
        void clearSamplingFlag(int32_t);
#endif

#if ENABLE(SAMPLING_COUNTERS)
        void emitCount(AbstractSamplingCounter&, uint32_t = 1);
#endif

#if ENABLE(OPCODE_SAMPLING)
        void sampleInstruction(Instruction*, bool = false);
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
        void sampleCodeBlock(CodeBlock*);
#else
        void sampleCodeBlock(CodeBlock*) {}
#endif

        Interpreter* m_interpreter;
        JSGlobalData* m_globalData;
        CodeBlock* m_codeBlock;

        Vector<CallRecord> m_calls;
        Vector<Label> m_labels;
        Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
        Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
        Vector<MethodCallCompilationInfo> m_methodCallCompilationInfo;
        Vector<JumpTable> m_jmpTable;

        unsigned m_bytecodeOffset;
        Vector<JSRInfo> m_jsrSites;
        Vector<SlowCaseEntry> m_slowCases;
        Vector<SwitchRecord> m_switches;

        unsigned m_propertyAccessInstructionIndex;
        unsigned m_globalResolveInfoIndex;
        unsigned m_callLinkInfoIndex;

#if USE(JSVALUE32_64)
        unsigned m_jumpTargetIndex;
        unsigned m_mappedBytecodeOffset;
        unsigned m_mappedVirtualRegisterIndex;
        RegisterID m_mappedTag;
        RegisterID m_mappedPayload;
#else
        int m_lastResultBytecodeRegister;
        unsigned m_jumpTargetsPosition;
#endif

#ifndef NDEBUG
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
        Label m_uninterruptedInstructionSequenceBegin;
        int m_uninterruptedConstantSequenceBegin;
#endif
#endif
        void* m_linkerOffset;
        static CodePtr stringGetByValStubGenerator(JSGlobalData* globalData, ExecutablePool* pool);
    } JIT_CLASS_ALIGNMENT;

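    // The op_loop* opcodes behave exactly like their plain jump counterparts, except that
    // they also emit a timeout check so long-running loops can be interrupted.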
    inline void JIT::emit_op_loop(Instruction* currentInstruction)
    {
        emitTimeoutCheck();
        emit_op_jmp(currentInstruction);
    }

    inline void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
    {
        emitTimeoutCheck();
        emit_op_jtrue(currentInstruction);
    }

    inline void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    {
        emitSlow_op_jtrue(currentInstruction, iter);
    }

    inline void JIT::emit_op_loop_if_false(Instruction* currentInstruction)
    {
        emitTimeoutCheck();
        emit_op_jfalse(currentInstruction);
    }

    inline void JIT::emitSlow_op_loop_if_false(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    {
        emitSlow_op_jfalse(currentInstruction, iter);
    }

    inline void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
    {
        emitTimeoutCheck();
        emit_op_jless(currentInstruction);
    }

    inline void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    {
        emitSlow_op_jless(currentInstruction, iter);
    }

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JIT_h