/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"

#if ENABLE(JIT)

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE32_64)

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
{
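    // In the JSVALUE32_64 representation a JSValue is a 64-bit pair of 32-bit
    // words - a tag and a payload - so values travel through this code in
    // register pairs (by convention the tag in regT1/regT3, the payload in
    // regT0/regT2).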
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag

    Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! Get the length from the UString.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSString, m_value) + OBJECT_OFFSETOF(UString, m_rep)), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(UString::Rep, len)), regT2);

    Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
    move(regT2, regT0);
    move(Imm32(JSValue::Int32Tag), regT1);

    ret();
#endif

    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.

#if ENABLE(JIT_OPTIMIZE_CALL)
    /* VirtualCallLink Trampoline */
    Label virtualCallLinkBegin = align();

    // regT0 holds callee, regT1 holds argCount.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_body)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT2);
    Jump hasCodeBlock2 = branchTestPtr(NonZero, regT2);

    // Lazily generate a CodeBlock.
    preserveReturnAddressAfterCall(regT3); // return address
    restoreArgumentReference();
    Call callJSFunction2 = call();
    move(regT0, regT2);
    emitGetJITStubArg(1, regT0); // callee
    emitGetJITStubArg(5, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3); // return address
    hasCodeBlock2.link(this);

    // regT2 holds codeBlock.
    Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 3); // return address
    emitPutJITStubArg(regT2, 7); // codeBlock
    restoreArgumentReference();
    Call callArityCheck2 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(1, regT0); // callee
    emitGetJITStubArg(5, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3); // return address

    arityCheckOkay2.link(this);
    isNativeFunc2.link(this);

    compileOpCallInitializeCallFrame();

    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 3);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);
#endif // ENABLE(JIT_OPTIMIZE_CALL)

    /* VirtualCall Trampoline */
    Label virtualCallBegin = align();

    // regT0 holds callee, regT1 holds argCount.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_body)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT2);
    Jump hasCodeBlock3 = branchTestPtr(NonZero, regT2);

    // Lazily generate a CodeBlock.
    preserveReturnAddressAfterCall(regT3); // return address
    restoreArgumentReference();
    Call callJSFunction1 = call();
    move(regT0, regT2);
    emitGetJITStubArg(1, regT0); // callee
    emitGetJITStubArg(5, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3); // return address
    hasCodeBlock3.link(this);

    // regT2 holds codeBlock.
    Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 3); // return address
    emitPutJITStubArg(regT2, 7); // codeBlock
    restoreArgumentReference();
    Call callArityCheck1 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(1, regT0); // callee
    emitGetJITStubArg(5, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3); // return address

    arityCheckOkay3.link(this);
    isNativeFunc3.link(this);
    compileOpCallInitializeCallFrame();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_body)), regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(FunctionBodyNode, m_jitCode)), regT0);
    jump(regT0);

#if PLATFORM(X86)
    Label nativeCallThunk = align();
    preserveReturnAddressAfterCall(regT0);
    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address

    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    /* We have two structs that we use to describe the stack frame we set up for our
     * call to native code.  NativeCallFrameStructure describes how we set up the stack
     * in advance of the call.  NativeFunctionCalleeSignature describes the call frame
     * as the native code expects it.  We do this because we are using the fastcall calling
     * convention, which results in the callee popping its arguments off the stack, but
     * not the rest of the call frame, so we need a reliable way to ensure we increment the
     * stack pointer by the right amount after the call.
     */
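    // (For reference: x86 fastcall passes the first two pointer-sized
    // arguments in ECX and EDX; remaining arguments go on the stack, and the
    // callee pops only those stack arguments on return.)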

#if COMPILER(MSVC) || PLATFORM(LINUX)
#if COMPILER(MSVC)
#pragma pack(push)
#pragma pack(4)
#endif // COMPILER(MSVC)
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in EDX
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
        JSValue result;
    };
    struct NativeFunctionCalleeSignature {
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
    };
#if COMPILER(MSVC)
#pragma pack(pop)
#endif // COMPILER(MSVC)
#else
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in ECX
      //  JSObject* callee; // passed in EDX
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
    };
    struct NativeFunctionCalleeSignature {
        JSValue thisValue;
        ArgList* argPointer;
    };
#endif

    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
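    // ((x + 15) & ~15 rounds the frame size up to the next multiple of 16,
    // keeping the system stack 16-byte aligned across the call.)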
    // Allocate system stack frame
    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));

    // The ArgList is passed by reference, so store a pointer to it in the argPointer slot
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));

    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT3);
    storePtr(regT2, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    storePtr(regT3, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

#if COMPILER(MSVC) || PLATFORM(LINUX)
    // The JSValue result is returned through a hidden pointer, so pass the address of the result slot in ECX
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86::ecx);

    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::eax);
    storePtr(X86::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));

    // Plant callframe
    move(callFrameRegister, X86::edx);

    call(Address(X86::eax, OBJECT_OFFSETOF(JSFunction, m_data)));

    // JSValue is a non-POD type, so eax points to it
    emitLoad(0, regT1, regT0, X86::eax);
#else
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::edx); // callee
    move(callFrameRegister, X86::ecx); // callFrame
    call(Address(X86::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
#endif

    // We've put a few temporaries on the stack in addition to the actual arguments,
    // so pull them off now.
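    // The fastcall callee has already popped its stack arguments (the
    // NativeFunctionCalleeSignature portion of the frame), so only the
    // remainder needs to be released here.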
    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);

    // Check for an exception
    // FIXME: Maybe we can optimize this comparison to JSValue().
    move(ImmPtr(&globalData->exception), regT2);
    Jump sawException1 = branch32(NotEqual, tagFor(0, regT2), Imm32(JSValue::CellTag));
    Jump sawException2 = branch32(NonZero, payloadFor(0, regT2), Imm32(0));

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT3);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // Handle an exception
    sawException1.link(this);
    sawException2.link(this);
    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    move(ImmPtr(reinterpret_cast<void*>(ctiVMThrowTrampoline)), regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    restoreReturnAddressBeforeReturn(regT2);
    ret();

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
#endif

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
    *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
#else
    UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
#else
    UNUSED_PARAM(ctiVirtualCallLink);
#endif
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
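        // Record that dst is now cached in (regT1, regT0) so the following
        // opcode can reuse the registers without reloading from the call frame.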
        map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
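    // Bytecode jump offsets are taken relative to the operand slot that holds
    // them, hence the '+ 1' here (and the '+ 2' / '+ 3' used by branches whose
    // target sits in operand 2 or 3 below).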
    addJump(jump(), target + 1);
}

void JIT::emit_op_loop(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    emitTimeoutCheck();
    addJump(jump(), target + 1);
}

void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThan, regT0, Imm32(getConstantOperand(op1).asInt32())), target + 3);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(LessThan, regT0, Imm32(getConstantOperand(op2).asInt32())), target + 3);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    addJump(branch32(LessThan, regT0, regT2), target + 3);
}

void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_less);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target + 3);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target + 3);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target + 3);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (proto, baseVal, and value, in that order) into registers.
    // We use regT0 for baseVal since we will be done with it first, and we can then use it for the result.
    emitLoadPayload(proto, regT1);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(value, regT2);

    // Check that baseVal & proto are cells.
    emitJumpSlowCaseIfNotJSCell(proto);
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal is an object, that it 'ImplementsHasInstance' but that it does not 'OverridesHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType))); // FIXME: Maybe remove this test.
    addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsHasInstance))); // FIXME: TOT checks ImplementsDefaultHasInstance.

    // If value is not an Object, return false.
    emitLoadTag(value, regT0);
    Jump valueIsImmediate = branch32(NotEqual, regT0, Imm32(JSValue::CellTag));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    Jump valueIsNotObject = branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)); // FIXME: Maybe remove this test.

    // Check proto is object.
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(Imm32(JSValue::TrueTag), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branch32(NotEqual, regT2, Imm32(0), loop);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    valueIsImmediate.link(this);
    valueIsNotObject.link(this);
    move(Imm32(JSValue::FalseTag), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->function(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[2].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[3].u.operand;

    loadPtr(&globalObject->d()->registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[1].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    loadPtr(&globalObject->d()->registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

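    // A code block that needs a full scope chain has an activation node at the
    // head of the chain, which is why needsFullScopeChain() is folded into the
    // skip count above.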
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitTimeoutCheck();

    emitLoad(cond, regT1, regT0);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    addJump(branch32(NotEqual, regT0, Imm32(0)), target + 2);
    Jump isNotZero = jump();

    isNotInteger.link(this);

    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target + 2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::FalseTag)));

    isNotZero.link(this);
}

void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 2);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Verify structure.
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))));

    // Load property.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
    load32(offsetAddr, regT3);
    load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_resolve_global), dst, regT1, regT0);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

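    // TrueTag and FalseTag differ only in the low bit, so xor'ing the tag with
    // FalseTag yields 0 or 1 for booleans; any other bits set means the value
    // is not a boolean and takes the slow case. A second xor with TrueTag then
    // produces the tag of the inverted boolean.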
    xor32(Imm32(JSValue::FalseTag), regT0);
    addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
    xor32(Imm32(JSValue::TrueTag), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isTrue = branch32(Equal, regT1, Imm32(JSValue::TrueTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::FalseTag)), target + 2);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isTrue2 = branch32(NotEqual, regT0, Imm32(0));
    addJump(jump(), target + 2);

    isNotInteger.link(this);

    addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

    zeroDouble(fpRegT0);
    emitLoadDouble(cond, fpRegT1);
    addJump(branchDouble(DoubleEqual, fpRegT0, fpRegT1), target + 2);

    isTrue.link(this);
    isTrue2.link(this);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target + 2); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target + 2);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
    addJump(jump(), target + 2);

    isNotInteger.link(this);

    addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

    zeroDouble(fpRegT0);
    emitLoadDouble(cond, fpRegT1);
    addJump(branchDouble(DoubleNotEqual, fpRegT0, fpRegT1), target + 2);

    isFalse.link(this);
    isFalse2.link(this);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 2);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(NonZero, regT1), target + 2);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(Zero, regT1), target + 2);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target + 3);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target + 3);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target + 2);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

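    // The tags match and the value is neither a cell nor a double, so the
    // payloads compare directly; or'ing FalseTag into the 0/1 result of set8
    // yields FalseTag or TrueTag, which differ only in the low bit.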
    set8(Equal, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(NotEqual, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(Imm32(0x1), regT0);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));
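    // All non-double tags share their upper bits, so the AND of two tags only
    // drops below LowestTag when a double is involved; an AND at or above
    // CellTag means both operands are cells and/or Int32s, whose payloads
    // would need comparing.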
1069 
1070     if (type == OpStrictEq)
1071         set8(Equal, regT0, regT1, regT0);
1072     else
1073         set8(NotEqual, regT0, regT1, regT0);
1074 
1075     or32(Imm32(JSValue::FalseTag), regT0);
1076 
1077     emitStoreBool(dst, regT0);
1078 }
1079 
emit_op_stricteq(Instruction * currentInstruction)1080 void JIT::emit_op_stricteq(Instruction* currentInstruction)
1081 {
1082     compileOpStrictEq(currentInstruction, OpStrictEq);
1083 }
1084 
emitSlow_op_stricteq(Instruction * currentInstruction,Vector<SlowCaseEntry>::iterator & iter)1085 void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1086 {
1087     unsigned dst = currentInstruction[1].u.operand;
1088     unsigned src1 = currentInstruction[2].u.operand;
1089     unsigned src2 = currentInstruction[3].u.operand;
1090 
1091     linkSlowCase(iter);
1092     linkSlowCase(iter);
1093 
1094     JITStubCall stubCall(this, cti_op_stricteq);
1095     stubCall.addArgument(src1);
1096     stubCall.addArgument(src2);
1097     stubCall.call(dst);
1098 }
1099 
emit_op_nstricteq(Instruction * currentInstruction)1100 void JIT::emit_op_nstricteq(Instruction* currentInstruction)
1101 {
1102     compileOpStrictEq(currentInstruction, OpNStrictEq);
1103 }
1104 
emitSlow_op_nstricteq(Instruction * currentInstruction,Vector<SlowCaseEntry>::iterator & iter)1105 void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1106 {
1107     unsigned dst = currentInstruction[1].u.operand;
1108     unsigned src1 = currentInstruction[2].u.operand;
1109     unsigned src2 = currentInstruction[3].u.operand;
1110 
1111     linkSlowCase(iter);
1112     linkSlowCase(iter);
1113 
1114     JITStubCall stubCall(this, cti_op_nstricteq);
1115     stubCall.addArgument(src1);
1116     stubCall.addArgument(src2);
1117     stubCall.call(dst);
1118 }
1119 
emit_op_eq_null(Instruction * currentInstruction)1120 void JIT::emit_op_eq_null(Instruction* currentInstruction)
1121 {
1122     unsigned dst = currentInstruction[1].u.operand;
1123     unsigned src = currentInstruction[2].u.operand;
1124 
1125     emitLoad(src, regT1, regT0);
1126     Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
1127 
1128     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
1129     setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);
1130 
1131     Jump wasNotImmediate = jump();
1132 
1133     isImmediate.link(this);
1134 
1135     set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
1136     set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
1137     or32(regT2, regT1);
1138 
1139     wasNotImmediate.link(this);
1140 
1141     or32(Imm32(JSValue::FalseTag), regT1);
1142 
1143     emitStoreBool(dst, regT1);
1144 }
1145 
emit_op_neq_null(Instruction * currentInstruction)1146 void JIT::emit_op_neq_null(Instruction* currentInstruction)
1147 {
1148     unsigned dst = currentInstruction[1].u.operand;
1149     unsigned src = currentInstruction[2].u.operand;
1150 
1151     emitLoad(src, regT1, regT0);
1152     Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
1153 
1154     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
1155     setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);
1156 
1157     Jump wasNotImmediate = jump();
1158 
1159     isImmediate.link(this);
1160 
1161     set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
1162     set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
1163     and32(regT2, regT1);
1164 
1165     wasNotImmediate.link(this);
1166 
1167     or32(Imm32(JSValue::FalseTag), regT1);
1168 
1169     emitStoreBool(dst, regT1);
1170 }
1171 
emit_op_resolve_with_base(Instruction * currentInstruction)1172 void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
1173 {
1174     JITStubCall stubCall(this, cti_op_resolve_with_base);
1175     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
1176     stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1177     stubCall.call(currentInstruction[2].u.operand);
1178 }
1179 
emit_op_new_func_exp(Instruction * currentInstruction)1180 void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
1181 {
1182     JITStubCall stubCall(this, cti_op_new_func_exp);
1183     stubCall.addArgument(ImmPtr(m_codeBlock->functionExpression(currentInstruction[2].u.operand)));
1184     stubCall.call(currentInstruction[1].u.operand);
1185 }
1186 
emit_op_new_regexp(Instruction * currentInstruction)1187 void JIT::emit_op_new_regexp(Instruction* currentInstruction)
1188 {
1189     JITStubCall stubCall(this, cti_op_new_regexp);
1190     stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
1191     stubCall.call(currentInstruction[1].u.operand);
1192 }
1193 
emit_op_throw(Instruction * currentInstruction)1194 void JIT::emit_op_throw(Instruction* currentInstruction)
1195 {
1196     unsigned exception = currentInstruction[1].u.operand;
1197     JITStubCall stubCall(this, cti_op_throw);
1198     stubCall.addArgument(exception);
1199     stubCall.call();
1200 
1201 #ifndef NDEBUG
1202     // cti_op_throw always changes it's return address,
1203     // this point in the code should never be reached.
1204     breakpoint();
1205 #endif
1206 }
1207 
emit_op_next_pname(Instruction * currentInstruction)1208 void JIT::emit_op_next_pname(Instruction* currentInstruction)
1209 {
1210     int dst = currentInstruction[1].u.operand;
1211     int iter = currentInstruction[2].u.operand;
1212     int target = currentInstruction[3].u.operand;
1213 
1214     load32(Address(callFrameRegister, (iter * sizeof(Register))), regT0);
1215 
1216     JITStubCall stubCall(this, cti_op_next_pname);
1217     stubCall.addArgument(regT0);
1218     stubCall.call();
1219 
1220     Jump endOfIter = branchTestPtr(Zero, regT0);
1221     emitStore(dst, regT1, regT0);
1222     map(m_bytecodeIndex + OPCODE_LENGTH(op_next_pname), dst, regT1, regT0);
1223     addJump(jump(), target + 3);
1224     endOfIter.link(this);
1225 }
1226 
emit_op_push_scope(Instruction * currentInstruction)1227 void JIT::emit_op_push_scope(Instruction* currentInstruction)
1228 {
1229     JITStubCall stubCall(this, cti_op_push_scope);
1230     stubCall.addArgument(currentInstruction[1].u.operand);
1231     stubCall.call(currentInstruction[1].u.operand);
1232 }
1233 
emit_op_pop_scope(Instruction *)1234 void JIT::emit_op_pop_scope(Instruction*)
1235 {
1236     JITStubCall(this, cti_op_pop_scope).call();
1237 }
1238 
emit_op_to_jsnumber(Instruction * currentInstruction)1239 void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
1240 {
1241     int dst = currentInstruction[1].u.operand;
1242     int src = currentInstruction[2].u.operand;
1243 
1244     emitLoad(src, regT1, regT0);
1245 
1246     Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
1247     addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::DeletedValueTag)));
1248     isInt32.link(this);
1249 
1250     if (src != dst)
1251         emitStore(dst, regT1, regT0);
1252     map(m_bytecodeIndex + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
1253 }
1254 
emitSlow_op_to_jsnumber(Instruction * currentInstruction,Vector<SlowCaseEntry>::iterator & iter)1255 void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1256 {
1257     int dst = currentInstruction[1].u.operand;
1258 
1259     linkSlowCase(iter);
1260 
1261     JITStubCall stubCall(this, cti_op_to_jsnumber);
1262     stubCall.addArgument(regT1, regT0);
1263     stubCall.call(dst);
1264 }
1265 
emit_op_push_new_scope(Instruction * currentInstruction)1266 void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
1267 {
1268     JITStubCall stubCall(this, cti_op_push_new_scope);
1269     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
1270     stubCall.addArgument(currentInstruction[3].u.operand);
1271     stubCall.call(currentInstruction[1].u.operand);
1272 }
1273 
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;

    // This opcode only executes after a return from cti_op_throw.

    // cti_op_throw may have taken us to a call frame further up the stack; reload
    // the call frame pointer to adjust.
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));

    // Now store the exception returned by cti_op_throw.
    emitStore(exception, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand + 2);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for the switch destinations and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
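    // The stub consults the jump table (or falls back to the default offset)
    // and returns the native code address to continue at in regT0.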
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for the switch destinations and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for the switch destinations and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned type = currentInstruction[2].u.operand;
    unsigned message = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(type));
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(dst);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
}


void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    emit_op_enter(currentInstruction);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_create_arguments(Instruction*)
{
    Jump argsNotCell = branch32(NotEqual, tagFor(RegisterFile::ArgumentsRegister, callFrameRegister), Imm32(JSValue::CellTag));
    Jump argsNotNull = branchTestPtr(NonZero, payloadFor(RegisterFile::ArgumentsRegister, callFrameRegister));

    // If we get here the arguments pointer is a null cell - i.e. arguments need lazy creation.
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    argsNotCell.link(this);
    argsNotNull.link(this);
}

void JIT::emit_op_init_arguments(Instruction*)
{
    emitStore(RegisterFile::ArgumentsRegister, JSValue(), callFrameRegister);
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));

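    // A cell may still require 'this' conversion if its Structure's type info
    // sets the NeedsThisConversion flag; send those to the slow case as well.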
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));

    map(m_bytecodeIndex + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

#else // USE(JSVALUE32_64)

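// Flag the label at the given bytecode offset as a jump target, so branches
// back into the opcode stream can be linked to it.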
#define RECORD_JUMP_TARGET(targetOffset) \
    do { m_labels[m_bytecodeIndex + (targetOffset)].used(); } while (false)

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
{
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This trampoline provides fast property access for string length.
    Label stringLengthBegin = align();

    // Check that the value in regT0 is a JSString cell.
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSString, m_value) + OBJECT_OFFSETOF(UString, m_rep)), regT0);
    load32(Address(regT0, OBJECT_OFFSETOF(UString::Rep, len)), regT0);

    Jump string_failureCases3 = branch32(Above, regT0, Imm32(JSImmediate::maxImmediateInt));

    // regT0 holds a positive, zero-extended 64-bit value, so no sign extension is needed here.
    emitFastArithIntToImmNoCheck(regT0, regT0);

    ret();
#endif

    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);

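    // VirtualCallLink trampoline: the slow path taken the first time an
    // unlinked call site executes. It compiles the callee if needed, checks
    // arity, and then calls cti_vm_lazyLinkCall to patch the call site so
    // subsequent calls jump straight to the callee's code.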
    Label virtualCallLinkBegin = align();

    // Load the callee CodeBlock* into eax.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT0);
    Jump hasCodeBlock2 = branchTestPtr(NonZero, regT0);
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction2 = call();
    emitGetJITStubArg(1, regT2);
    emitGetJITStubArg(3, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    hasCodeBlock2.link(this);

    Jump isNativeFunc2 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 2);
    emitPutJITStubArg(regT0, 4);
    restoreArgumentReference();
    Call callArityCheck2 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(1, regT2);
    emitGetJITStubArg(3, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    arityCheckOkay2.link(this);
    isNativeFunc2.link(this);

    compileOpCallInitializeCallFrame();

    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 2);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);

    jump(regT0);

    Label virtualCallBegin = align();

    // Load the callee CodeBlock* into eax.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT0);
    Jump hasCodeBlock3 = branchTestPtr(NonZero, regT0);
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction1 = call();
    emitGetJITStubArg(1, regT2);
    emitGetJITStubArg(3, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3); // Reload the function body node, so we can reload the code pointer.
    hasCodeBlock3.link(this);

    Jump isNativeFunc3 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_codeType)), Imm32(NativeCode));

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 2);
    emitPutJITStubArg(regT0, 4);
    restoreArgumentReference();
    Call callArityCheck1 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(1, regT2);
    emitGetJITStubArg(3, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3); // Reload the function body node, so we can reload the code pointer.
    arityCheckOkay3.link(this);
    isNativeFunc3.link(this);

    // Load ctiCode from the new codeBlock.
    loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_jitCode)), regT0);

    compileOpCallInitializeCallFrame();
    jump(regT0);


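    // NativeCallThunk: adapts from the JIT's internal calling convention to
    // the host C++ ABI so that native (host) functions can be called directly.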
    Label nativeCallThunk = align();
    preserveReturnAddressAfterCall(regT0);
    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address

    // Load the caller frame's scope chain into this call frame so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);


#if PLATFORM(X86_64)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, X86::ecx);

    // Allocate stack space for our arglist.
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
    COMPILE_ASSERT((sizeof(ArgList) & 0xf) == 0, ArgList_should_be_16byte_aligned);

    // Set up arguments.
    subPtr(Imm32(1), X86::ecx); // Don't include 'this' in argcount.

    // Push argcount.
    storePtr(X86::ecx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in edx.
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), callFrameRegister, X86::edx);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (ecx).
    mul32(Imm32(sizeof(Register)), X86::ecx, X86::ecx);
    subPtr(X86::ecx, X86::edx);

    // Push pointer to arguments.
    storePtr(X86::edx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // The ArgList is passed by reference; it lives at stackPointerRegister.
    move(stackPointerRegister, X86::ecx);

    // edx currently points to the first argument; edx - sizeof(Register) points to 'this'.
    loadPtr(Address(X86::edx, -(int32_t)sizeof(Register)), X86::edx);

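    // The native call follows the System V AMD64 convention: the first four
    // integer arguments travel in rdi/rsi/rdx/rcx - here the ExecState*, the
    // callee, the 'this' value, and the ArgList* respectively.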
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::esi);

    move(callFrameRegister, X86::edi);

    call(Address(X86::esi, OBJECT_OFFSETOF(JSFunction, m_data)));

    addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
#elif PLATFORM(X86)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    /* We have two structs that we use to describe the stack frame we set up for
     * our call to native code. NativeCallFrameStructure describes how we set up
     * the stack in advance of the call. NativeFunctionCalleeSignature describes
     * the call frame as the native code expects it. We need both because we use
     * the fastcall calling convention, under which the callee pops its arguments
     * off the stack, but not the rest of the call frame, so we need a reliable
     * way to increment the stack pointer by the right amount after the call.
     */
#if COMPILER(MSVC) || PLATFORM(LINUX)
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in EDX
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
        JSValue result;
    };
    struct NativeFunctionCalleeSignature {
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
    };
#else
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in ECX
      //  JSObject* callee; // passed in EDX
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
    };
    struct NativeFunctionCalleeSignature {
        JSValue thisValue;
        ArgList* argPointer;
    };
#endif
    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
    // Allocate system stack frame.
    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);

    // Set up arguments.
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount.

    // Push argcount.
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1.
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0).
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));

    // The ArgList is passed by reference; store its address into the frame.
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));

    // regT1 currently points to the first argument; regT1 - sizeof(Register) points to 'this'.
    loadPtr(Address(regT1, -(int)sizeof(Register)), regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue)));

#if COMPILER(MSVC) || PLATFORM(LINUX)
    // JSValue is a non-POD type on this ABI, so the result comes back through a
    // hidden pointer; pass the address of the result slot as the first (ECX) argument.
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86::ecx);

    // Plant callee.
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::eax);
    storePtr(X86::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));

    // Plant callframe.
    move(callFrameRegister, X86::edx);

    call(Address(X86::eax, OBJECT_OFFSETOF(JSFunction, m_data)));

    // JSValue is a non-POD type, so eax points at the returned value; load it out.
    loadPtr(Address(X86::eax), X86::eax);
#else
    // Plant callee.
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86::edx);

    // Plant callframe.
    move(callFrameRegister, X86::ecx);
    call(Address(X86::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
#endif

    // We've put a few temporaries on the stack in addition to the actual arguments,
    // so pull them off now.
    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);

#elif PLATFORM(ARM) && !PLATFORM_ARM_ARCH(7)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    // Allocate stack space for our arglist.
    COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0, ArgList_should_be_8byte_aligned);
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);

    // Set up arguments.
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount.

    // Push argcount.
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1.
    move(callFrameRegister, regT1);
    sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1).
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);

    // Push pointer to arguments.
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // Set up arg3: regT1 currently points to the first argument; regT1 - sizeof(Register) points to 'this'.
    loadPtr(Address(regT1, -(int32_t)sizeof(Register)), regT2);

    // Set up arg2:
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);

    // Set up arg1:
    move(callFrameRegister, regT0);

    // Set up arg4: this is a plain hack.
    move(stackPointerRegister, ARM::S0);

    move(ctiReturnRegister, ARM::lr);
    call(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_data)));

    addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif

    // Check for an exception.
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Handle an exception.
    exceptionHandler.link(this);
    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    move(ImmPtr(reinterpret_cast<void*>(ctiVMThrowTrampoline)), regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    restoreReturnAddressBeforeReturn(regT2);
    ret();


#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up the calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
#endif

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
    *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
    *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
#else
    UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src)) {
        storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
        if (dst == m_lastResultBytecodeRegister)
            killLastResultRegister();
    } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
        // If either the src or dst is the cached register, go through the
        // get/put registers to make sure we track this correctly.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        // Perform the copy via regT1; do not disturb any mapping in regT0.
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
        storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target + 1);
    RECORD_JUMP_TARGET(target + 1);
}

void JIT::emit_op_loop(Instruction* currentInstruction)
{
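    // Loop back-edges run the timeout check so that runaway scripts can be
    // interrupted; plain forward jumps (op_jmp) do not need it.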
    emitTimeoutCheck();

    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target + 1);
}

void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
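    // Specialize when either operand is a constant integer: compare the other
    // operand against the immediate directly, flipping the condition when the
    // constant appears on the left.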
    if (isOperandConstantImmediateInt(op2)) {
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(JSVALUE64)
        int32_t op2imm = getConstantOperandImmediateInt(op2);
#else
        int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
#endif
        addJump(branch32(LessThan, regT0, Imm32(op2imm)), target + 3);
    } else if (isOperandConstantImmediateInt(op1)) {
        emitGetVirtualRegister(op2, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(JSVALUE64)
        int32_t op1imm = getConstantOperandImmediateInt(op1);
#else
        int32_t op1imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op1)));
#endif
        addJump(branch32(GreaterThan, regT0, Imm32(op1imm)), target + 3);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThan, regT0, regT1), target + 3);
    }
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(JSVALUE64)
        int32_t op2imm = getConstantOperandImmediateInt(op2);
#else
        int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
#endif
        addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target + 3);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThanOrEqual, regT0, regT1), target + 3);
    }
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT2);

    // Check that baseVal & proto are cells.
    emitJumpSlowCaseIfNotJSCell(regT0);
    emitJumpSlowCaseIfNotJSCell(regT1);

    // Check that baseVal is an object, that it 'ImplementsHasInstance' but that it does not 'OverridesHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // If value is not an Object, return false.
    Jump valueIsImmediate = emitJumpIfNotJSCell(regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    Jump valueIsNotObject = branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType));

    // Check proto is object.
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(ImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchPtr(NotEqual, regT2, ImmPtr(JSValue::encode(jsNull())), loop);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    valueIsImmediate.link(this);
    valueIsNotObject.link(this);
    move(ImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->function(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;

    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();
    // Stores a naked int32 in the register file.
    store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
    move(ImmPtr(globalObject), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[3].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
    move(ImmPtr(globalObject), regT0);
    emitPutVariableObjectRegister(regT1, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
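    // 'skip' counts the scope-chain hops to the target scope; one extra hop is
    // added when this code block carries a full scope chain.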
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    while (skip--)
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    while (skip--)
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);

    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
    emitPutVariableObjectRegister(regT0, regT1, currentInstruction[1].u.operand);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_ret_scopeChain).call();

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_construct_verify(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0))));
    addJump(emitJumpIfImmediateInteger(regT0), target + 2);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target + 2);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction)
{
    // Fast case
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
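    // The slow path caches the global object's Structure* and the property's
    // storage offset into this GlobalResolveInfo, so later executions pass the
    // structure check below and take the fast path.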

    // Check Structure of global object
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    Jump noMatch = branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT0);
    load32(offsetAddr, regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
    Jump end = jump();

    // Slow case
    noMatch.link(this);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(currentInstruction[1].u.operand);
    end.link(this);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
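    // Boolean immediates are FullTagTypeBool plus the value in
    // ExtendedPayloadBitBoolValue. The first xor strips the tag; if any bit
    // other than the value bit remains, the operand was not a boolean and we
    // take the slow case. The second xor flips the value and restores the tag.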
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, Imm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0)))), target + 2);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))), target + 2);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
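    // The undefined and null immediates differ only in the
    // ExtendedTagBitUndefined bit, so masking it off lets a single comparison
    // against null catch both.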
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNull()))), target + 2);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsNull()))), target + 2);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue(ptr)))), target + 3);

    RECORD_JUMP_TARGET(target + 3);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
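    // Store a placeholder return address for the matching op_sret; the real
    // machine-code address (the label recorded below) is patched in at link
    // time via the JSRInfo list.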
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target + 2);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
    killLastResultRegister();
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    killLastResultRegister();
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitnot(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(JSVALUE64)
    not32(regT0);
    emitFastArithIntToImmNoCheck(regT0, regT0);
#else
    xorPtr(Imm32(~JSImmediate::TagTypeNumber), regT0);
#endif
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpression(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0))));
    addJump(emitJumpIfImmediateInteger(regT0), target + 2);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target + 2);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_next_pname);
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.call();
    Jump endOfIter = branchTestPtr(Zero, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
    addJump(jump(), currentInstruction[3].u.operand + 3);
    endOfIter.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));
    addSlowCase(emitJumpIfImmediateNumber(regT2));
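    // Two non-number immediates are strictly equal iff their bits are equal,
    // so a plain register compare suffices below; numbers (which may be
    // doubles) and cells (which may be equal strings) went to the slow case.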

    if (type == OpStrictEq)
        set32(Equal, regT1, regT0, regT0);
    else
        set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand + 2);
    RECORD_JUMP_TARGET(currentInstruction[2].u.operand + 2);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for the switch destinations and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for the switch destinations and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for the switch destinations and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[3].u.operand))));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(currentInstruction[1].u.operand);
}

2553 void JIT::emit_op_debug(Instruction* currentInstruction)
2554 {
2555     JITStubCall stubCall(this, cti_op_debug);
2556     stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
2557     stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
2558     stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
2559     stubCall.call();
2560 }
2561 
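// op_eq_null must answer true for null, for undefined, and for objects that
// masquerade as undefined (e.g. document.all). Cells are tested against the
// MasqueradesAsUndefined structure flag; for immediates, undefined differs
// from null only by the ExtendedTagBitUndefined bit, so masking that bit off
// lets a single comparison against the null encoding cover both values.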
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(Equal, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

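// op_neq_null is op_eq_null with both comparison senses inverted.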
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(NotEqual, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}

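// The arguments object is created lazily. The register file slot at
// RegisterFile::ArgumentsRegister doubles as the "already created" flag
// (op_init_arguments below zeroes it), so a non-null value means there is
// nothing to do. m_numParameters == 1 means |this| is the only parameter,
// in which case the cheaper no-params stub suffices.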
void JIT::emit_op_create_arguments(Instruction*)
{
    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    argsCreated.link(this);
}

void JIT::emit_op_init_arguments(Instruction*)
{
    storePtr(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
}

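// op_convert_this only needs work when |this| is not already a usable
// object: cells whose structure lacks the NeedsThisConversion flag fall
// straight through, while immediates and convertible cells (e.g. strings)
// are punted to the slow case.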
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    addSlowCase(branchTest32(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
}

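// Both profiler hooks use the same fast-out: the JITStackFrame holds a
// pointer to the enabled-profiler reference, so a single load-and-test
// skips the stub call entirely whenever no profiler is attached.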
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

// Slow cases

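// Each handler below must consume the slow-case entries its fast path
// registered with addSlowCase, in emission order: linkSlowCase(iter) binds
// the next recorded jump to the current code position. This is why the
// number of linkSlowCase calls in each handler mirrors the number of checks
// emitted by the corresponding emit_op_* above.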
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // The slow case that handles accesses to arrays (below) may jump back up to here.
    Label beginGetByValSlow(this);

    Jump notImm = getSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitFastArithIntToImmNoCheck(regT1, regT1);

    notImm.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));

    // This is the slow case that handles accesses to arrays above the fast cut-off.
    // First, check if this is an access to the vector.
    linkSlowCase(iter);
    branch32(AboveOrEqual, regT1, Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_vectorLength)), beginGetByValSlow);

    // Okay, missed the fast region, but it is still in the vector. Get the value.
    loadPtr(BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), regT2);
    // Check whether the value loaded is zero; if so we need to return undefined.
    branchTestPtr(Zero, regT2, beginGetByValSlow);
    move(regT2, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
}

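// Three variants, mirroring the fast path: when one operand is a constant
// immediate int only the other side was type-checked (one slow-case entry
// to link); when both operands are in registers either check can fail (two
// entries). In every variant the stub performs the comparison, and the
// branch back to the loop head is taken when it returns true.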
void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_less);
        stubCall.addArgument(regT0);
        stubCall.addArgument(op2, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    } else if (isOperandConstantImmediateInt(op1)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_less);
        stubCall.addArgument(op1, regT2);
        stubCall.addArgument(regT0);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_less);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    }
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(op2, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    }
}

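// Two independent groups of slow cases feed this handler. The first,
// generic group covers a non-immediate-int subscript or a non-array base,
// and re-enters the main line after the fast path. The second group covers
// an immediate-int store to an array that missed the fast cut-off, which
// the dedicated array stub completes.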
void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Normal slow cases - either the subscript is not an immediate int, or the base is not an array.
    Jump notImm = getSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitFastArithIntToImmNoCheck(regT1, regT1);

    notImm.link(this); {
        JITStubCall stubCall(this, cti_op_put_by_val);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.addArgument(currentInstruction[3].u.operand, regT2);
        stubCall.call();
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_put_by_val));
    }

    // Slow cases for immediate int accesses to arrays.
    linkSlowCase(iter);
    linkSlowCase(iter); {
        JITStubCall stubCall(this, cti_op_put_by_val_array);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.addArgument(currentInstruction[3].u.operand, regT2);
        stubCall.call();
    }
}

void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand + 2);
}

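// The fast path for op_not appears to xor the boolean tag bits into regT0
// before its type check, so that flip must be undone here to recover the
// original operand before it is handed to the stub.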
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand + 2); // Inverted: take the branch when cti_op_jtrue returns false.
}

void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand + 2);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

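// Rather than a dedicated stub, op_neq reuses cti_op_eq and flips the low
// bit of its 0/1 result before boxing it as a boolean.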
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(Imm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

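// The op_instanceof fast path registers five slow-case checks (presumably
// the cell checks on the three operands plus the structure-flag tests), so
// five linkSlowCase calls are required here. No register state can be
// assumed at this point, so all three operands are reloaded from their
// virtual registers.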
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.addArgument(currentInstruction[4].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

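// The call-shaped opcodes share compileOpCallSlowCase; m_callLinkInfoIndex
// is advanced here in step with the fast-path compile, so that each slow
// case is paired with the CallLinkInfo its fast path allocated. The varargs
// form goes through its own helper.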
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)