1 /*
2  * Copyright (C) 2008 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25 
26 #ifndef JIT_h
27 #define JIT_h
28 
29 #include <wtf/Platform.h>
30 
31 #if ENABLE(JIT)
32 
33 // We've run into some problems where changing the size of the class JIT leads to
34 // performance fluctuations.  Try forcing alignment in an attempt to stabilize this.
35 #if COMPILER(GCC)
36 #define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
37 #else
38 #define JIT_CLASS_ALIGNMENT
39 #endif
40 
41 #define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(actual), static_cast<int>(expected));
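// Editor's note: an illustrative sketch (assumed, not part of this header) of how the macro above
// would typically be used - verifying that a hand-maintained patch-offset constant still matches
// the code actually emitted. 'hotPathBegin' and 'structureToCompare' are hypothetical labels here:
//
//     ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetGetByIdStructure);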
42 
43 #include "CodeBlock.h"
44 #include "Interpreter.h"
45 #include "JITCode.h"
46 #include "JITStubs.h"
47 #include "Opcode.h"
48 #include "RegisterFile.h"
49 #include "MacroAssembler.h"
50 #include "Profiler.h"
51 #include <bytecode/SamplingTool.h>
52 #include <wtf/AlwaysInline.h>
53 #include <wtf/Vector.h>
54 
55 namespace JSC {
56 
57     class CodeBlock;
58     class JIT;
59     class JSPropertyNameIterator;
60     class Interpreter;
61     class Register;
62     class RegisterFile;
63     class ScopeChainNode;
64     class StructureChain;
65 
66     struct CallLinkInfo;
67     struct Instruction;
68     struct OperandTypes;
69     struct PolymorphicAccessStructureList;
70     struct SimpleJumpTable;
71     struct StringJumpTable;
72     struct StructureStubInfo;
73 
74     struct CallRecord {
75         MacroAssembler::Call from;
76         unsigned bytecodeIndex;
77         void* to;
78 
79         CallRecord()
80         {
81         }
82 
83         CallRecord(MacroAssembler::Call from, unsigned bytecodeIndex, void* to = 0)
84             : from(from)
85             , bytecodeIndex(bytecodeIndex)
86             , to(to)
87         {
88         }
89     };
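    // Illustrative sketch (assumed, not from this header): a CallRecord is typically appended as
    // each call is emitted, so the link pass can later bind the call site to its target once code
    // addresses are known. 'call' and 'targetFunction' are hypothetical locals:
    //
    //     Call call = nearCall();
    //     m_calls.append(CallRecord(call, m_bytecodeIndex, targetFunction));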
90 
91     struct JumpTable {
92         MacroAssembler::Jump from;
93         unsigned toBytecodeIndex;
94 
95         JumpTable(MacroAssembler::Jump f, unsigned t)
96             : from(f)
97             , toBytecodeIndex(t)
98         {
99         }
100     };
101 
102     struct SlowCaseEntry {
103         MacroAssembler::Jump from;
104         unsigned to;
105         unsigned hint;
106 
107         SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
108             : from(f)
109             , to(t)
110             , hint(h)
111         {
112         }
113     };
114 
115     struct SwitchRecord {
116         enum Type {
117             Immediate,
118             Character,
119             String
120         };
121 
122         Type type;
123 
124         union {
125             SimpleJumpTable* simpleJumpTable;
126             StringJumpTable* stringJumpTable;
127         } jumpTable;
128 
129         unsigned bytecodeIndex;
130         unsigned defaultOffset;
131 
132         SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeIndex, unsigned defaultOffset, Type type)
133             : type(type)
134             , bytecodeIndex(bytecodeIndex)
135             , defaultOffset(defaultOffset)
136         {
137             this->jumpTable.simpleJumpTable = jumpTable;
138         }
139 
140         SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeIndex, unsigned defaultOffset)
141             : type(String)
142             , bytecodeIndex(bytecodeIndex)
143             , defaultOffset(defaultOffset)
144         {
145             this->jumpTable.stringJumpTable = jumpTable;
146         }
147     };
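    // Illustrative sketch (assumed): recording an immediate switch so the link pass can populate
    // its jump table after bytecode offsets are resolved. 'jumpTable' and 'defaultOffset' are
    // hypothetical locals:
    //
    //     m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));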
148 
149     struct PropertyStubCompilationInfo {
150         MacroAssembler::Call callReturnLocation;
151         MacroAssembler::Label hotPathBegin;
152     };
153 
154     struct StructureStubCompilationInfo {
155         MacroAssembler::DataLabelPtr hotPathBegin;
156         MacroAssembler::Call hotPathOther;
157         MacroAssembler::Call callReturnLocation;
158     };
159 
160     struct MethodCallCompilationInfo {
161         MethodCallCompilationInfo(unsigned propertyAccessIndex)
162             : propertyAccessIndex(propertyAccessIndex)
163         {
164         }
165 
166         MacroAssembler::DataLabelPtr structureToCompare;
167         unsigned propertyAccessIndex;
168     };
169 
170     // Near calls can only be patched to other JIT code; regular calls can be patched to JIT code or relinked to stub functions.
171     void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
172     void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
173     void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
174 
175     class JIT : private MacroAssembler {
176         friend class JITStubCall;
177 
178         using MacroAssembler::Jump;
179         using MacroAssembler::JumpList;
180         using MacroAssembler::Label;
181 
182         // NOTES:
183         //
184         // regT0 has two special meanings.  The return value from a stub
185         // call will always be in regT0, and by default (unless
186         // a register is specified) emitPutVirtualRegister() will store
187         // the value from regT0.
188         //
189         // regT3 is required to be callee-preserved.
190         //
191         // tempRegister2 has no such dependencies.  It is important that
192         // on x86/x86-64 it is ecx for performance reasons, since the
193         // MacroAssembler will need to plant register swaps if it is not -
194         // however the code will still function correctly.
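        // Illustrative sketch (assumed) of the regT0 convention above, in terms of the helpers
        // declared later in this header for the non-JSVALUE32_64 build:
        //
        //     emitGetVirtualRegister(src, regT0);  // load the operand into regT0
        //     // ... compute the result into regT0 ...
        //     emitPutVirtualRegister(dst);         // stores regT0 by default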
195 #if CPU(X86_64)
196         static const RegisterID returnValueRegister = X86Registers::eax;
197         static const RegisterID cachedResultRegister = X86Registers::eax;
198         static const RegisterID firstArgumentRegister = X86Registers::edi;
199 
200         static const RegisterID timeoutCheckRegister = X86Registers::r12;
201         static const RegisterID callFrameRegister = X86Registers::r13;
202         static const RegisterID tagTypeNumberRegister = X86Registers::r14;
203         static const RegisterID tagMaskRegister = X86Registers::r15;
204 
205         static const RegisterID regT0 = X86Registers::eax;
206         static const RegisterID regT1 = X86Registers::edx;
207         static const RegisterID regT2 = X86Registers::ecx;
208         static const RegisterID regT3 = X86Registers::ebx;
209 
210         static const FPRegisterID fpRegT0 = X86Registers::xmm0;
211         static const FPRegisterID fpRegT1 = X86Registers::xmm1;
212         static const FPRegisterID fpRegT2 = X86Registers::xmm2;
213 #elif CPU(X86)
214         static const RegisterID returnValueRegister = X86Registers::eax;
215         static const RegisterID cachedResultRegister = X86Registers::eax;
216         // On x86 we always use fastcall conventions - but on
217         // OS X it might make more sense to just use regparm.
218         static const RegisterID firstArgumentRegister = X86Registers::ecx;
219 
220         static const RegisterID timeoutCheckRegister = X86Registers::esi;
221         static const RegisterID callFrameRegister = X86Registers::edi;
222 
223         static const RegisterID regT0 = X86Registers::eax;
224         static const RegisterID regT1 = X86Registers::edx;
225         static const RegisterID regT2 = X86Registers::ecx;
226         static const RegisterID regT3 = X86Registers::ebx;
227 
228         static const FPRegisterID fpRegT0 = X86Registers::xmm0;
229         static const FPRegisterID fpRegT1 = X86Registers::xmm1;
230         static const FPRegisterID fpRegT2 = X86Registers::xmm2;
231 #elif CPU(ARM_THUMB2)
232         static const RegisterID returnValueRegister = ARMRegisters::r0;
233         static const RegisterID cachedResultRegister = ARMRegisters::r0;
234         static const RegisterID firstArgumentRegister = ARMRegisters::r0;
235 
236         static const RegisterID regT0 = ARMRegisters::r0;
237         static const RegisterID regT1 = ARMRegisters::r1;
238         static const RegisterID regT2 = ARMRegisters::r2;
239         static const RegisterID regT3 = ARMRegisters::r4;
240 
241         static const RegisterID callFrameRegister = ARMRegisters::r5;
242         static const RegisterID timeoutCheckRegister = ARMRegisters::r6;
243 
244         static const FPRegisterID fpRegT0 = ARMRegisters::d0;
245         static const FPRegisterID fpRegT1 = ARMRegisters::d1;
246         static const FPRegisterID fpRegT2 = ARMRegisters::d2;
247 #elif CPU(ARM_TRADITIONAL)
248         static const RegisterID returnValueRegister = ARMRegisters::r0;
249         static const RegisterID cachedResultRegister = ARMRegisters::r0;
250         static const RegisterID firstArgumentRegister = ARMRegisters::r0;
251 
252         static const RegisterID timeoutCheckRegister = ARMRegisters::r5;
253         static const RegisterID callFrameRegister = ARMRegisters::r4;
254 
255         static const RegisterID regT0 = ARMRegisters::r0;
256         static const RegisterID regT1 = ARMRegisters::r1;
257         static const RegisterID regT2 = ARMRegisters::r2;
258         // Callee preserved
259         static const RegisterID regT3 = ARMRegisters::r7;
260 
261         static const RegisterID regS0 = ARMRegisters::S0;
262         // Callee preserved
263         static const RegisterID regS1 = ARMRegisters::S1;
264 
265         static const RegisterID regStackPtr = ARMRegisters::sp;
266         static const RegisterID regLink = ARMRegisters::lr;
267 
268         static const FPRegisterID fpRegT0 = ARMRegisters::d0;
269         static const FPRegisterID fpRegT1 = ARMRegisters::d1;
270         static const FPRegisterID fpRegT2 = ARMRegisters::d2;
271 #else
272     #error "JIT not supported on this platform."
273 #endif
274 
275         static const int patchGetByIdDefaultStructure = -1;
276         // Magic number - the initial offset must not be representable as a signed 8-bit value, or the X86Assembler
277         // will compress the displacement and we may not be able to fit a patched offset.
278         static const int patchGetByIdDefaultOffset = 256;
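        // (A signed 8-bit displacement covers only -128..127, so using 256 as the placeholder
        // forces the assembler to emit a full 32-bit displacement, which a later patch can
        // overwrite with any real property offset.)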
279 
280     public:
281         static JITCode compile(JSGlobalData* globalData, CodeBlock* codeBlock)
282         {
283             return JIT(globalData, codeBlock).privateCompile();
284         }
285 
286         static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress)
287         {
288             JIT jit(globalData, codeBlock);
289             jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, cachedOffset, returnAddress, callFrame);
290         }
291 
292         static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
293         {
294             JIT jit(globalData, codeBlock);
295             jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, cachedOffset);
296         }
297         static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset)
298         {
299             JIT jit(globalData, codeBlock);
300             jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, cachedOffset, callFrame);
301         }
302         static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset)
303         {
304             JIT jit(globalData, codeBlock);
305             jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, cachedOffset, callFrame);
306         }
307 
308         static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress)
309         {
310             JIT jit(globalData, codeBlock);
311             jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, cachedOffset, returnAddress, callFrame);
312         }
313 
314         static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress)
315         {
316             JIT jit(globalData, codeBlock);
317             jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress);
318         }
319 
320         static void compileCTIMachineTrampolines(JSGlobalData* globalData, RefPtr<ExecutablePool>* executablePool, TrampolineStructure *trampolines)
321         {
322             JIT jit(globalData);
323             jit.privateCompileCTIMachineTrampolines(executablePool, globalData, trampolines);
324         }
325 
326         static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
327         static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
328         static void patchMethodCallProto(CodeBlock* codeblock, MethodCallLinkInfo&, JSFunction*, Structure*, JSObject*, ReturnAddressPtr);
329 
330         static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
331         {
332             JIT jit(globalData, codeBlock);
333             return jit.privateCompilePatchGetArrayLength(returnAddress);
334         }
335 
336         static void linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JITCode&, CallLinkInfo*, int callerArgCount, JSGlobalData*);
337         static void unlinkCall(CallLinkInfo*);
338 
339     private:
340         struct JSRInfo {
341             DataLabelPtr storeLocation;
342             Label target;
343 
344             JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
345                 : storeLocation(storeLocation)
346                 , target(targetLocation)
347             {
348             }
349         };
350 
351         JIT(JSGlobalData*, CodeBlock* = 0);
352 
353         void privateCompileMainPass();
354         void privateCompileLinkPass();
355         void privateCompileSlowCases();
356         JITCode privateCompile();
357         void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
358         void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, size_t cachedOffset);
359         void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame);
360         void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame);
361         void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
362         void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress);
363 
364         void privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* data, TrampolineStructure *trampolines);
365         void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
366 
367         void addSlowCase(Jump);
368         void addSlowCase(JumpList);
369         void addJump(Jump, int);
370         void emitJumpSlowToHot(Jump, int);
371 
372         void compileOpCall(OpcodeID, Instruction* instruction, unsigned callLinkInfoIndex);
373         void compileOpCallVarargs(Instruction* instruction);
374         void compileOpCallInitializeCallFrame();
375         void compileOpCallSetupArgs(Instruction*);
376         void compileOpCallVarargsSetupArgs(Instruction*);
377         void compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID);
378         void compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter);
379         void compileOpConstructSetupArgs(Instruction*);
380 
381         enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
382         void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
383         bool isOperandConstantImmediateDouble(unsigned src);
384 
385         void emitLoadDouble(unsigned index, FPRegisterID value);
386         void emitLoadInt32ToDouble(unsigned index, FPRegisterID value);
387 
388         Address addressFor(unsigned index, RegisterID base = callFrameRegister);
389 
390         void testPrototype(Structure*, JumpList& failureCases);
391 
392 #if USE(JSVALUE32_64)
393         Address tagFor(unsigned index, RegisterID base = callFrameRegister);
394         Address payloadFor(unsigned index, RegisterID base = callFrameRegister);
395 
396         bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);
397 
398         void emitLoadTag(unsigned index, RegisterID tag);
399         void emitLoadPayload(unsigned index, RegisterID payload);
400 
401         void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
402         void emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
403         void emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2);
404 
405         void emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
406         void emitStore(unsigned index, const JSValue constant, RegisterID base = callFrameRegister);
407         void emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32 = false);
408         void emitStoreInt32(unsigned index, Imm32 payload, bool indexIsInt32 = false);
409         void emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell = false);
410         void emitStoreBool(unsigned index, RegisterID tag, bool indexIsBool = false);
411         void emitStoreDouble(unsigned index, FPRegisterID value);
412 
413         bool isLabeled(unsigned bytecodeIndex);
414         void map(unsigned bytecodeIndex, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload);
415         void unmap(RegisterID);
416         void unmap();
417         bool isMapped(unsigned virtualRegisterIndex);
418         bool getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload);
419         bool getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag);
420 
421         void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex);
422         void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag);
423         void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, unsigned virtualRegisterIndex);
424 
425 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
426         void compileGetByIdHotPath();
427         void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
428 #endif
429         void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, Structure* structure, size_t cachedOffset);
430         void compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
431         void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID structure, RegisterID offset);
432         void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, Structure* structure, size_t cachedOffset);
433 
434         // Arithmetic opcode helpers
435         void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
436         void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
437         void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
438 
439 #if CPU(X86)
440         // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
441         static const int patchOffsetPutByIdStructure = 7;
442         static const int patchOffsetPutByIdExternalLoad = 13;
443         static const int patchLengthPutByIdExternalLoad = 3;
444         static const int patchOffsetPutByIdPropertyMapOffset1 = 22;
445         static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
446         // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
447         static const int patchOffsetGetByIdStructure = 7;
448         static const int patchOffsetGetByIdBranchToSlowCase = 13;
449         static const int patchOffsetGetByIdExternalLoad = 13;
450         static const int patchLengthGetByIdExternalLoad = 3;
451         static const int patchOffsetGetByIdPropertyMapOffset1 = 22;
452         static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
453         static const int patchOffsetGetByIdPutResult = 28;
454 #if ENABLE(OPCODE_SAMPLING) && USE(JIT_STUB_ARGUMENT_VA_LIST)
455         static const int patchOffsetGetByIdSlowCaseCall = 35;
456 #elif ENABLE(OPCODE_SAMPLING)
457         static const int patchOffsetGetByIdSlowCaseCall = 37;
458 #elif USE(JIT_STUB_ARGUMENT_VA_LIST)
459         static const int patchOffsetGetByIdSlowCaseCall = 25;
460 #else
461         static const int patchOffsetGetByIdSlowCaseCall = 27;
462 #endif
463         static const int patchOffsetOpCallCompareToJump = 6;
464 
465         static const int patchOffsetMethodCheckProtoObj = 11;
466         static const int patchOffsetMethodCheckProtoStruct = 18;
467         static const int patchOffsetMethodCheckPutFunction = 29;
468 #elif CPU(ARM_TRADITIONAL)
469         // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
470         static const int patchOffsetPutByIdStructure = 4;
471         static const int patchOffsetPutByIdExternalLoad = 16;
472         static const int patchLengthPutByIdExternalLoad = 4;
473         static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
474         static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
475         // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
476         static const int patchOffsetGetByIdStructure = 4;
477         static const int patchOffsetGetByIdBranchToSlowCase = 16;
478         static const int patchOffsetGetByIdExternalLoad = 16;
479         static const int patchLengthGetByIdExternalLoad = 4;
480         static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
481         static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
482         static const int patchOffsetGetByIdPutResult = 36;
483 #if ENABLE(OPCODE_SAMPLING)
484         #error "OPCODE_SAMPLING is not yet supported"
485 #else
486         static const int patchOffsetGetByIdSlowCaseCall = 32;
487 #endif
488         static const int patchOffsetOpCallCompareToJump = 12;
489 
490         static const int patchOffsetMethodCheckProtoObj = 12;
491         static const int patchOffsetMethodCheckProtoStruct = 20;
492         static const int patchOffsetMethodCheckPutFunction = 32;
493 
494         // sequenceOpCall
495         static const int sequenceOpCallInstructionSpace = 12;
496         static const int sequenceOpCallConstantSpace = 2;
497         // sequenceMethodCheck
498         static const int sequenceMethodCheckInstructionSpace = 40;
499         static const int sequenceMethodCheckConstantSpace = 6;
500         // sequenceGetByIdHotPath
501         static const int sequenceGetByIdHotPathInstructionSpace = 36;
502         static const int sequenceGetByIdHotPathConstantSpace = 4;
503         // sequenceGetByIdSlowCase
504         static const int sequenceGetByIdSlowCaseInstructionSpace = 40;
505         static const int sequenceGetByIdSlowCaseConstantSpace = 2;
506         // sequencePutById
507         static const int sequencePutByIdInstructionSpace = 36;
508         static const int sequencePutByIdConstantSpace = 4;
509 #else
510 #error "JSVALUE32_64 not supported on this platform."
511 #endif
512 
513 #else // USE(JSVALUE32_64)
514         void emitGetVirtualRegister(int src, RegisterID dst);
515         void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
516         void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);
517 
518         int32_t getConstantOperandImmediateInt(unsigned src);
519 
520         void emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst);
521         void emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index);
522 
523         void killLastResultRegister();
524 
525         Jump emitJumpIfJSCell(RegisterID);
526         Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
527         void emitJumpSlowCaseIfJSCell(RegisterID);
528         Jump emitJumpIfNotJSCell(RegisterID);
529         void emitJumpSlowCaseIfNotJSCell(RegisterID);
530         void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
531 #if USE(JSVALUE64)
532         JIT::Jump emitJumpIfImmediateNumber(RegisterID);
533         JIT::Jump emitJumpIfNotImmediateNumber(RegisterID);
534 #else
535         JIT::Jump emitJumpIfImmediateNumber(RegisterID reg)
536         {
537             return emitJumpIfImmediateInteger(reg);
538         }
539 
540         JIT::Jump emitJumpIfNotImmediateNumber(RegisterID reg)
541         {
542             return emitJumpIfNotImmediateInteger(reg);
543         }
544 #endif
545         JIT::Jump emitJumpIfImmediateInteger(RegisterID);
546         JIT::Jump emitJumpIfNotImmediateInteger(RegisterID);
547         JIT::Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
548         void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
549         void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
550         void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
551 
552 #if !USE(JSVALUE64)
553         void emitFastArithDeTagImmediate(RegisterID);
554         Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
555 #endif
556         void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
557         void emitFastArithImmToInt(RegisterID);
558         void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);
559 
560         void emitTagAsBoolImmediate(RegisterID reg);
561         void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
562 #if USE(JSVALUE64)
563         void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
564 #else
565         void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes);
566 #endif
567 
568 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
569         void compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier* ident, unsigned propertyAccessInstructionIndex);
570         void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
571 #endif
572         void compileGetDirectOffset(RegisterID base, RegisterID result, Structure* structure, size_t cachedOffset);
573         void compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID result, size_t cachedOffset);
574         void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID structure, RegisterID offset, RegisterID scratch);
575         void compilePutDirectOffset(RegisterID base, RegisterID value, Structure* structure, size_t cachedOffset);
576 
577 #if CPU(X86_64)
578         // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
579         static const int patchOffsetPutByIdStructure = 10;
580         static const int patchOffsetPutByIdExternalLoad = 20;
581         static const int patchLengthPutByIdExternalLoad = 4;
582         static const int patchOffsetPutByIdPropertyMapOffset = 31;
583         // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
584         static const int patchOffsetGetByIdStructure = 10;
585         static const int patchOffsetGetByIdBranchToSlowCase = 20;
586         static const int patchOffsetGetByIdExternalLoad = 20;
587         static const int patchLengthGetByIdExternalLoad = 4;
588         static const int patchOffsetGetByIdPropertyMapOffset = 31;
589         static const int patchOffsetGetByIdPutResult = 31;
590 #if ENABLE(OPCODE_SAMPLING)
591         static const int patchOffsetGetByIdSlowCaseCall = 64;
592 #else
593         static const int patchOffsetGetByIdSlowCaseCall = 41;
594 #endif
595         static const int patchOffsetOpCallCompareToJump = 9;
596 
597         static const int patchOffsetMethodCheckProtoObj = 20;
598         static const int patchOffsetMethodCheckProtoStruct = 30;
599         static const int patchOffsetMethodCheckPutFunction = 50;
600 #elif CPU(X86)
601         // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
602         static const int patchOffsetPutByIdStructure = 7;
603         static const int patchOffsetPutByIdExternalLoad = 13;
604         static const int patchLengthPutByIdExternalLoad = 3;
605         static const int patchOffsetPutByIdPropertyMapOffset = 22;
606         // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
607         static const int patchOffsetGetByIdStructure = 7;
608         static const int patchOffsetGetByIdBranchToSlowCase = 13;
609         static const int patchOffsetGetByIdExternalLoad = 13;
610         static const int patchLengthGetByIdExternalLoad = 3;
611         static const int patchOffsetGetByIdPropertyMapOffset = 22;
612         static const int patchOffsetGetByIdPutResult = 22;
613 #if ENABLE(OPCODE_SAMPLING) && USE(JIT_STUB_ARGUMENT_VA_LIST)
614         static const int patchOffsetGetByIdSlowCaseCall = 31;
615 #elif ENABLE(OPCODE_SAMPLING)
616         static const int patchOffsetGetByIdSlowCaseCall = 33;
617 #elif USE(JIT_STUB_ARGUMENT_VA_LIST)
618         static const int patchOffsetGetByIdSlowCaseCall = 21;
619 #else
620         static const int patchOffsetGetByIdSlowCaseCall = 23;
621 #endif
622         static const int patchOffsetOpCallCompareToJump = 6;
623 
624         static const int patchOffsetMethodCheckProtoObj = 11;
625         static const int patchOffsetMethodCheckProtoStruct = 18;
626         static const int patchOffsetMethodCheckPutFunction = 29;
627 #elif CPU(ARM_THUMB2)
628         // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
629         static const int patchOffsetPutByIdStructure = 10;
630         static const int patchOffsetPutByIdExternalLoad = 26;
631         static const int patchLengthPutByIdExternalLoad = 12;
632         static const int patchOffsetPutByIdPropertyMapOffset = 46;
633         // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
634         static const int patchOffsetGetByIdStructure = 10;
635         static const int patchOffsetGetByIdBranchToSlowCase = 26;
636         static const int patchOffsetGetByIdExternalLoad = 26;
637         static const int patchLengthGetByIdExternalLoad = 12;
638         static const int patchOffsetGetByIdPropertyMapOffset = 46;
639         static const int patchOffsetGetByIdPutResult = 50;
640 #if ENABLE(OPCODE_SAMPLING)
641         static const int patchOffsetGetByIdSlowCaseCall = 0; // FIXME
642 #else
643         static const int patchOffsetGetByIdSlowCaseCall = 28;
644 #endif
645         static const int patchOffsetOpCallCompareToJump = 16;
646 
647         static const int patchOffsetMethodCheckProtoObj = 24;
648         static const int patchOffsetMethodCheckProtoStruct = 34;
649         static const int patchOffsetMethodCheckPutFunction = 58;
650 #elif CPU(ARM_TRADITIONAL)
651         // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
652         static const int patchOffsetPutByIdStructure = 4;
653         static const int patchOffsetPutByIdExternalLoad = 16;
654         static const int patchLengthPutByIdExternalLoad = 4;
655         static const int patchOffsetPutByIdPropertyMapOffset = 20;
656         // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
657         static const int patchOffsetGetByIdStructure = 4;
658         static const int patchOffsetGetByIdBranchToSlowCase = 16;
659         static const int patchOffsetGetByIdExternalLoad = 16;
660         static const int patchLengthGetByIdExternalLoad = 4;
661         static const int patchOffsetGetByIdPropertyMapOffset = 20;
662         static const int patchOffsetGetByIdPutResult = 28;
663 #if ENABLE(OPCODE_SAMPLING)
664         #error "OPCODE_SAMPLING is not yet supported"
665 #else
666         static const int patchOffsetGetByIdSlowCaseCall = 28;
667 #endif
668         static const int patchOffsetOpCallCompareToJump = 12;
669 
670         static const int patchOffsetMethodCheckProtoObj = 12;
671         static const int patchOffsetMethodCheckProtoStruct = 20;
672         static const int patchOffsetMethodCheckPutFunction = 32;
673 
674         // sequenceOpCall
675         static const int sequenceOpCallInstructionSpace = 12;
676         static const int sequenceOpCallConstantSpace = 2;
677         // sequenceMethodCheck
678         static const int sequenceMethodCheckInstructionSpace = 40;
679         static const int sequenceMethodCheckConstantSpace = 6;
680         // sequenceGetByIdHotPath
681         static const int sequenceGetByIdHotPathInstructionSpace = 28;
682         static const int sequenceGetByIdHotPathConstantSpace = 3;
683         // sequenceGetByIdSlowCase
684         static const int sequenceGetByIdSlowCaseInstructionSpace = 32;
685         static const int sequenceGetByIdSlowCaseConstantSpace = 2;
686         // sequencePutById
687         static const int sequencePutByIdInstructionSpace = 28;
688         static const int sequencePutByIdConstantSpace = 3;
689 #endif
690 #endif // USE(JSVALUE32_64)
691 
692 #if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
693 #define BEGIN_UNINTERRUPTED_SEQUENCE(name) beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace)
694 #define END_UNINTERRUPTED_SEQUENCE(name) endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace)
695 
696         void beginUninterruptedSequence(int, int);
697         void endUninterruptedSequence(int, int);
698 
699 #else
700 #define BEGIN_UNINTERRUPTED_SEQUENCE(name)
701 #define END_UNINTERRUPTED_SEQUENCE(name)
702 #endif
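        // Illustrative sketch (assumed, not from this header): the macros above bracket a patchable
        // code sequence so that, on assemblers with a constant pool, the pool cannot be flushed into
        // the middle of the sequence and disturb the patch offsets declared earlier in this file:
        //
        //     BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
        //     // ... emit the get_by_id hot path ...
        //     END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);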
703 
704         void emit_op_add(Instruction*);
705         void emit_op_bitand(Instruction*);
706         void emit_op_bitnot(Instruction*);
707         void emit_op_bitor(Instruction*);
708         void emit_op_bitxor(Instruction*);
709         void emit_op_call(Instruction*);
710         void emit_op_call_eval(Instruction*);
711         void emit_op_call_varargs(Instruction*);
712         void emit_op_catch(Instruction*);
713         void emit_op_construct(Instruction*);
714         void emit_op_construct_verify(Instruction*);
715         void emit_op_convert_this(Instruction*);
716         void emit_op_create_arguments(Instruction*);
717         void emit_op_debug(Instruction*);
718         void emit_op_del_by_id(Instruction*);
719         void emit_op_div(Instruction*);
720         void emit_op_end(Instruction*);
721         void emit_op_enter(Instruction*);
722         void emit_op_enter_with_activation(Instruction*);
723         void emit_op_eq(Instruction*);
724         void emit_op_eq_null(Instruction*);
725         void emit_op_get_by_id(Instruction*);
726         void emit_op_get_by_val(Instruction*);
727         void emit_op_get_by_pname(Instruction*);
728         void emit_op_get_global_var(Instruction*);
729         void emit_op_get_scoped_var(Instruction*);
730         void emit_op_init_arguments(Instruction*);
731         void emit_op_instanceof(Instruction*);
732         void emit_op_jeq_null(Instruction*);
733         void emit_op_jfalse(Instruction*);
734         void emit_op_jmp(Instruction*);
735         void emit_op_jmp_scopes(Instruction*);
736         void emit_op_jneq_null(Instruction*);
737         void emit_op_jneq_ptr(Instruction*);
738         void emit_op_jnless(Instruction*);
739         void emit_op_jless(Instruction*);
740         void emit_op_jnlesseq(Instruction*);
741         void emit_op_jsr(Instruction*);
742         void emit_op_jtrue(Instruction*);
743         void emit_op_load_varargs(Instruction*);
744         void emit_op_loop(Instruction*);
745         void emit_op_loop_if_less(Instruction*);
746         void emit_op_loop_if_lesseq(Instruction*);
747         void emit_op_loop_if_true(Instruction*);
748         void emit_op_loop_if_false(Instruction*);
749         void emit_op_lshift(Instruction*);
750         void emit_op_method_check(Instruction*);
751         void emit_op_mod(Instruction*);
752         void emit_op_mov(Instruction*);
753         void emit_op_mul(Instruction*);
754         void emit_op_negate(Instruction*);
755         void emit_op_neq(Instruction*);
756         void emit_op_neq_null(Instruction*);
757         void emit_op_new_array(Instruction*);
758         void emit_op_new_error(Instruction*);
759         void emit_op_new_func(Instruction*);
760         void emit_op_new_func_exp(Instruction*);
761         void emit_op_new_object(Instruction*);
762         void emit_op_new_regexp(Instruction*);
763         void emit_op_get_pnames(Instruction*);
764         void emit_op_next_pname(Instruction*);
765         void emit_op_not(Instruction*);
766         void emit_op_nstricteq(Instruction*);
767         void emit_op_pop_scope(Instruction*);
768         void emit_op_post_dec(Instruction*);
769         void emit_op_post_inc(Instruction*);
770         void emit_op_pre_dec(Instruction*);
771         void emit_op_pre_inc(Instruction*);
772         void emit_op_profile_did_call(Instruction*);
773         void emit_op_profile_will_call(Instruction*);
774         void emit_op_push_new_scope(Instruction*);
775         void emit_op_push_scope(Instruction*);
776         void emit_op_put_by_id(Instruction*);
777         void emit_op_put_by_index(Instruction*);
778         void emit_op_put_by_val(Instruction*);
779         void emit_op_put_getter(Instruction*);
780         void emit_op_put_global_var(Instruction*);
781         void emit_op_put_scoped_var(Instruction*);
782         void emit_op_put_setter(Instruction*);
783         void emit_op_resolve(Instruction*);
784         void emit_op_resolve_base(Instruction*);
785         void emit_op_resolve_global(Instruction*);
786         void emit_op_resolve_skip(Instruction*);
787         void emit_op_resolve_with_base(Instruction*);
788         void emit_op_ret(Instruction*);
789         void emit_op_rshift(Instruction*);
790         void emit_op_sret(Instruction*);
791         void emit_op_strcat(Instruction*);
792         void emit_op_stricteq(Instruction*);
793         void emit_op_sub(Instruction*);
794         void emit_op_switch_char(Instruction*);
795         void emit_op_switch_imm(Instruction*);
796         void emit_op_switch_string(Instruction*);
797         void emit_op_tear_off_activation(Instruction*);
798         void emit_op_tear_off_arguments(Instruction*);
799         void emit_op_throw(Instruction*);
800         void emit_op_to_jsnumber(Instruction*);
801         void emit_op_to_primitive(Instruction*);
802         void emit_op_unexpected_load(Instruction*);
803 #if ENABLE(JIT_OPTIMIZE_MOD)
804         void softModulo();
805 #endif
806 
807         void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
808         void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
809         void emitSlow_op_bitnot(Instruction*, Vector<SlowCaseEntry>::iterator&);
810         void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
811         void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
812         void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
813         void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
814         void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
815         void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
816         void emitSlow_op_construct_verify(Instruction*, Vector<SlowCaseEntry>::iterator&);
817         void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
818         void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
819         void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
820         void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
821         void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
822         void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
823         void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
824         void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
825         void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
826         void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
827         void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
828         void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
829         void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
830         void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
831         void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
832         void emitSlow_op_loop_if_false(Instruction*, Vector<SlowCaseEntry>::iterator&);
833         void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
834         void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
835         void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
836         void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
837         void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
838         void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
839         void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
840         void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
841         void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
842         void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
843         void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
844         void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
845         void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
846         void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
847         void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
848         void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
849         void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
850         void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
851         void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
852         void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
853 
854         /* These functions are deprecated: Please use JITStubCall instead. */
855         void emitPutJITStubArg(RegisterID src, unsigned argumentNumber);
856 #if USE(JSVALUE32_64)
857         void emitPutJITStubArg(RegisterID tag, RegisterID payload, unsigned argumentNumber);
858         void emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch1, RegisterID scratch2);
859 #else
860         void emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch);
861 #endif
862         void emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber);
863         void emitPutJITStubArgConstant(void* value, unsigned argumentNumber);
864         void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);
865 
866         void emitInitRegister(unsigned dst);
867 
868         void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
869         void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
870         void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
871         void emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
872 
873         JSValue getConstantOperand(unsigned src);
874         bool isOperandConstantImmediateInt(unsigned src);
875 
876         Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
877         {
878             return iter++->from;
879         }
880         void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
881         {
882             iter->from.link(this);
883             ++iter;
884         }
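        // Illustrative sketch (assumed): a typical emitSlow_op_* body consumes the slow cases
        // registered by addSlowCase() in emission order, e.g.
        //
        //     void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
        //     {
        //         linkSlowCase(iter);  // bind the jump planted by addSlowCase()
        //         // ... fall back to a stub call, then store the result ...
        //     }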
885         void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int vReg);
886 
887         Jump checkStructure(RegisterID reg, Structure* structure);
888 
889         void restoreArgumentReference();
890         void restoreArgumentReferenceForTrampoline();
891 
892         Call emitNakedCall(CodePtr function = CodePtr());
893 
894         void preserveReturnAddressAfterCall(RegisterID);
895         void restoreReturnAddressBeforeReturn(RegisterID);
896         void restoreReturnAddressBeforeReturn(Address);
897 
898         void emitTimeoutCheck();
899 #ifndef NDEBUG
900         void printBytecodeOperandTypes(unsigned src1, unsigned src2);
901 #endif
902 
903 #if ENABLE(SAMPLING_FLAGS)
904         void setSamplingFlag(int32_t);
905         void clearSamplingFlag(int32_t);
906 #endif
907 
908 #if ENABLE(SAMPLING_COUNTERS)
909         void emitCount(AbstractSamplingCounter&, uint32_t = 1);
910 #endif
911 
912 #if ENABLE(OPCODE_SAMPLING)
913         void sampleInstruction(Instruction*, bool = false);
914 #endif
915 
916 #if ENABLE(CODEBLOCK_SAMPLING)
917         void sampleCodeBlock(CodeBlock*);
918 #else
919         void sampleCodeBlock(CodeBlock*) {}
920 #endif
921 
922         Interpreter* m_interpreter;
923         JSGlobalData* m_globalData;
924         CodeBlock* m_codeBlock;
925 
926         Vector<CallRecord> m_calls;
927         Vector<Label> m_labels;
928         Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
929         Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
930         Vector<MethodCallCompilationInfo> m_methodCallCompilationInfo;
931         Vector<JumpTable> m_jmpTable;
932 
933         unsigned m_bytecodeIndex;
934         Vector<JSRInfo> m_jsrSites;
935         Vector<SlowCaseEntry> m_slowCases;
936         Vector<SwitchRecord> m_switches;
937 
938         unsigned m_propertyAccessInstructionIndex;
939         unsigned m_globalResolveInfoIndex;
940         unsigned m_callLinkInfoIndex;
941 
942 #if USE(JSVALUE32_64)
943         unsigned m_jumpTargetIndex;
944         unsigned m_mappedBytecodeIndex;
945         unsigned m_mappedVirtualRegisterIndex;
946         RegisterID m_mappedTag;
947         RegisterID m_mappedPayload;
948 #else
949         int m_lastResultBytecodeRegister;
950         unsigned m_jumpTargetsPosition;
951 #endif
952 
953 #ifndef NDEBUG
954 #if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
955         Label m_uninterruptedInstructionSequenceBegin;
956         int m_uninterruptedConstantSequenceBegin;
957 #endif
958 #endif
959     } JIT_CLASS_ALIGNMENT;
960 
961     inline void JIT::emit_op_loop(Instruction* currentInstruction)
962     {
963         emitTimeoutCheck();
964         emit_op_jmp(currentInstruction);
965     }
966 
967     inline void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
968     {
969         emitTimeoutCheck();
970         emit_op_jtrue(currentInstruction);
971     }
972 
973     inline void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
974     {
975         emitSlow_op_jtrue(currentInstruction, iter);
976     }
977 
978     inline void JIT::emit_op_loop_if_false(Instruction* currentInstruction)
979     {
980         emitTimeoutCheck();
981         emit_op_jfalse(currentInstruction);
982     }
983 
984     inline void JIT::emitSlow_op_loop_if_false(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
985     {
986         emitSlow_op_jfalse(currentInstruction, iter);
987     }
988 
989     inline void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
990     {
991         emitTimeoutCheck();
992         emit_op_jless(currentInstruction);
993     }
994 
995     inline void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
996     {
997         emitSlow_op_jless(currentInstruction, iter);
998     }
999 
1000 } // namespace JSC
1001 
1002 #endif // ENABLE(JIT)
1003 
1004 #endif // JIT_h
1005