/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE64)
#include "JIT.h"

#include "CodeBlock.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

void JIT::compileOpCallInitializeCallFrame()
{
    // regT0 holds callee, regT1 holds argCount
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT3); // scopeChain
    emitPutIntToCallFrameHeader(regT1, RegisterFile::ArgumentCount);
    emitPutCellToCallFrameHeader(regT0, RegisterFile::Callee);
    emitPutCellToCallFrameHeader(regT3, RegisterFile::ScopeChain);
}

void JIT::emit_op_call_put_result(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitPutVirtualRegister(dst);
}

void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int callee = instruction[1].u.operand;
    int argCountRegister = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    emitGetVirtualRegister(argCountRegister, regT1);
    emitFastArithImmToInt(regT1);
    emitGetVirtualRegister(callee, regT0);
    addPtr(Imm32(registerOffset), regT1, regT2);

    // Check for JSFunctions.
    emitJumpSlowCaseIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
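    // regT2 currently holds argCount + registerOffset in Register units; the next few instructions
    // convert it to a byte offset, store the current frame pointer into the new frame's CallerFrame
    // slot, and advance callFrameRegister to the new frame. Any arity mismatch is left for the
    // ctiVirtualCall thunk to sort out (hence "speculatively").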
    mul32(TrustedImm32(sizeof(Register)), regT2, regT2);
    intptr_t offset = (intptr_t)sizeof(Register) * (intptr_t)RegisterFile::CallerFrame;
    addPtr(Imm32((int32_t)offset), regT2, regT3);
    addPtr(callFrameRegister, regT3);
    storePtr(callFrameRegister, regT3);
    addPtr(regT2, callFrameRegister);
    emitNakedCall(m_globalData->jitStubs->ctiVirtualCall());

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallVarargsSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
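    // Both slow cases registered in compileOpCallVarargs land here: the callee was either not a
    // cell, or a cell whose vptr is not the JSFunction vptr. Either way, fall back to the generic
    // cti_op_call_NotJSFunction stub.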
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT2);
    stubCall.addArgument(regT1);
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}

#if !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
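// Without call optimization, every op_call / op_construct simply goes through the generic virtual
// call thunks and no per-call-site linking state is recorded. The ENABLE(JIT_OPTIMIZE_CALL)
// variants further down additionally plant a patchable pointer compare so a call site can later be
// linked directly to its callee.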

void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    // Handle eval
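    // For op_call_eval, call the eval stub first. The stub returns the empty JSValue when it did
    // not perform the eval; in that case we fall through and compile an ordinary call. Otherwise
    // regT0 already holds the result and the branch below skips the call entirely.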
    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee, regT0);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue())));
    }

    emitGetVirtualRegister(callee, regT0);

    // Check for JSFunctions.
    emitJumpSlowCaseIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstruct() : m_globalData->jitStubs->ctiVirtualCall());

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(regT0);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}

#else // !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    // Handle eval
    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee, regT0);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue())));
    }

    // This plants a check for a cached JSFunction value, so we can plant a fast link to the callee.
    // This deliberately leaves the callee in regT0, used when setting up the stack frame below.
    emitGetVirtualRegister(callee, regT0);
    DataLabelPtr addressOfLinkedFunctionCheck;

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(JSValue::encode(JSValue())));

    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(jumpToSlow);
    ASSERT_JIT_OFFSET(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow), patchOffsetOpCallCompareToJump);
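    // Record addressOfLinkedFunctionCheck as hotPathBegin: when this call site is later linked, the
    // constant compared against above is repatched from the empty value to the cached JSFunction,
    // so linked calls fall straight through to the fast path. The UNINTERRUPTED_SEQUENCE markers
    // keep the compare-and-jump a fixed size, which is what patchOffsetOpCallCompareToJump assumes.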
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;

    // The following is the fast case, only used when a callee can be linked.

    // Fast version of stack frame initialization, directly relative to callFrameRegister.
    // Note that this does not set up RegisterFile::CodeBlock, which is set in the callee.

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1); // newScopeChain

    store32(TrustedImm32(Int32Tag), intTagFor(registerOffset + RegisterFile::ArgumentCount));
    store32(Imm32(argCount), intPayloadFor(registerOffset + RegisterFile::ArgumentCount));
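    // The ArgumentCount header slot is written as a 32-bit tag/payload pair: Int32Tag in the tag
    // half, the statically known argCount in the payload half.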
    storePtr(callFrameRegister, Address(callFrameRegister, (registerOffset + RegisterFile::CallerFrame) * static_cast<int>(sizeof(Register))));
    storePtr(regT0, Address(callFrameRegister, (registerOffset + RegisterFile::Callee) * static_cast<int>(sizeof(Register))));
    storePtr(regT1, Address(callFrameRegister, (registerOffset + RegisterFile::ScopeChain) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    linkSlowCase(iter);

    // Fast check for JS function.
    Jump callLinkFailNotObject = emitJumpIfNotJSCell(regT0);
    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

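    // The link thunks (ctiVirtualCallLink / ctiVirtualConstructLink) are expected to link this call
    // site on first use, patching the hotPathBegin compare and the hotPathOther call to target the
    // callee directly so that subsequent calls take the fast path above.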
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstructLink() : m_globalData->jitStubs->ctiVirtualCallLink());

    // Done! - return to the hot path.
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions
    callLinkFailNotObject.link(this);
    callLinkFailNotJSFunction.link(this);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(regT0);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}

/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

#endif // !ENABLE(JIT_OPTIMIZE_CALL)

} // namespace JSC

#endif // USE(JSVALUE64)
#endif // ENABLE(JIT)