/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"

#if ENABLE(JIT)

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE32_64)

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure* trampolines)
{
#if ENABLE(JIT_OPTIMIZE_MOD)
    Label softModBegin = align();
    softModulo();
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This trampoline provides fast property access for string length.
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag

    Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! Get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_stringLength)), regT2);

    Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
    move(regT2, regT0);
    move(Imm32(JSValue::Int32Tag), regT1);

    ret();
#endif
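    // A note on the value encoding assumed above (a sketch; JSValue.h is the
    // authoritative definition): under USE(JSVALUE32_64) every JSValue is two
    // 32-bit words, a tag and a payload. CellTag marks the payload as a cell
    // pointer and Int32Tag marks it as an immediate integer, which is why the
    // fast path checks the tag, then the vtable pointer, and finally that the
    // length fits in an int32 (the INT_MAX check) before retagging the result.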

    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.

#if ENABLE(JIT_OPTIMIZE_CALL)
    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

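    // How the two branches below appear to classify the callee (inferred from
    // the stubs linked at the end of this function, not stated here): a zero
    // m_numParameters marks a native function, a positive count means a code
    // block has already been generated, and anything else falls through into
    // cti_op_call_JSFunction, which compiles the function; m_executable and
    // the argCount are then reloaded.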
    Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction2 = call();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    hasCodeBlock2.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck2 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    arityCheckOkay2.link(this);

    isNativeFunc2.link(this);

    compileOpCallInitializeCallFrame();

    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);
#endif // ENABLE(JIT_OPTIMIZE_CALL)

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction1 = call();
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck1 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    arityCheckOkay3.link(this);

    isNativeFunc3.link(this);

    compileOpCallInitializeCallFrame();
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
    jump(regT0);
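    // Unlike the VirtualCallLink trampoline above, which ends by calling into
    // the lazy-link stub, this trampoline jumps straight to the previously
    // generated machine code loaded from FunctionExecutable::m_jitCode.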

#if CPU(X86) || CPU(ARM_TRADITIONAL)
    Label nativeCallThunk = align();
    preserveReturnAddressAfterCall(regT0);
    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address

    // Load the caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

#if CPU(X86)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    /* We have two structs that we use to describe the stack frame we set up for our
     * call to native code. NativeCallFrameStructure describes how we set up the stack
     * in advance of the call. NativeFunctionCalleeSignature describes the callframe
     * as the native code expects it. We do this because we are using the fastcall calling
     * convention, which results in the callee popping its arguments off the stack, but
     * not the rest of the callframe, so we need a reliable way to ensure we increment the
     * stack pointer by the right amount after the call.
     */

#if COMPILER(MSVC) || OS(LINUX)
#if COMPILER(MSVC)
#pragma pack(push)
#pragma pack(4)
#endif // COMPILER(MSVC)
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in EDX
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
        JSValue result;
    };
    struct NativeFunctionCalleeSignature {
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
    };
#if COMPILER(MSVC)
#pragma pack(pop)
#endif // COMPILER(MSVC)
#else
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in ECX
      //  JSObject* callee; // passed in EDX
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
    };
    struct NativeFunctionCalleeSignature {
        JSValue thisValue;
        ArgList* argPointer;
    };
#endif

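    // The rounding below is the usual align-up idiom: (x + 15) & ~15 rounds x
    // up to the next multiple of 16, e.g. a 44-byte structure becomes a
    // 48-byte frame, keeping the system stack 16-byte aligned across the call
    // (the actual size depends on the structs above).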
    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
    // Allocate system stack frame
    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));

    // The ArgList is passed by reference, so store a pointer to the args structure on the stack.
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));

    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT3);
    storePtr(regT2, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    storePtr(regT3, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

#if COMPILER(MSVC) || OS(LINUX)
    // Compute a pointer to the result JSValue; the callee writes its return value through it (in ECX).
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);

    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
    storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));

    // Plant callframe
    move(callFrameRegister, X86Registers::edx);

    call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));

    // JSValue is a non-POD type, so eax points to it
    emitLoad(0, regT1, regT0, X86Registers::eax);
#else
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx); // callee
    move(callFrameRegister, X86Registers::ecx); // callFrame
    call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
#endif

    // We've put a few temporaries on the stack in addition to the actual arguments,
    // so pull them off now.
    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);
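    // Why only the difference is popped (following the fastcall accounting in
    // the block comment above): the callee itself pops
    // sizeof(NativeFunctionCalleeSignature) bytes of arguments on return, so
    // we only restore the remaining
    // NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature) bytes here.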

#elif CPU(ARM_TRADITIONAL)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    // Allocate stack space for our arglist
    COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0 && sizeof(JSValue) == 8 && sizeof(Register) == 8, ArgList_should_be_8byte_aligned);
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    move(callFrameRegister, regT1);
    sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);

    // Push a pointer to the arguments
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // Argument passing method:
    // r0 - points to return value
    // r1 - callFrame
    // r2 - callee
    // stack: this(JSValue) and a pointer to ArgList

    move(stackPointerRegister, regT3);
    subPtr(Imm32(8), stackPointerRegister);
    move(stackPointerRegister, regT0);
    subPtr(Imm32(8 + 4 + 4 /* padding */), stackPointerRegister);

    // Set up arg4:
    storePtr(regT3, Address(stackPointerRegister, 8));

    // Set up arg3:
    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    load32(Address(regT1, -(int32_t)sizeof(void*) * 2), regT3);
    storePtr(regT3, Address(stackPointerRegister, 0));
    load32(Address(regT1, -(int32_t)sizeof(void*)), regT3);
    storePtr(regT3, Address(stackPointerRegister, 4));

    // Set up arg2:
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);

    // Set up arg1:
    move(callFrameRegister, regT1);

    call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));

    // Load the return value
    load32(Address(stackPointerRegister, 16), regT0);
    load32(Address(stackPointerRegister, 20), regT1);

    addPtr(Imm32(sizeof(ArgList) + 16 + 8), stackPointerRegister);
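    // Stack accounting for the restore above (inferred from the stores and
    // loads in this block): sizeof(ArgList) covers the arglist allocated
    // first, 16 covers the outgoing argument area ('this' at offsets 0/4, the
    // ArgList pointer at 8, plus padding), and 8 covers the return-value slot
    // that r0 pointed at, read back at offsets 16 and 20 just above.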
#endif

    // Check for an exception.
    move(ImmPtr(&globalData->exception), regT2);
    Jump sawException = branch32(NotEqual, tagFor(0, regT2), Imm32(JSValue::EmptyValueTag));

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT3);

    // Restore our caller's call frame pointer.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // Handle an exception.
    sawException.link(this);
    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    restoreReturnAddressBeforeReturn(regT2);
    ret();
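    // The exception path works by return-address rewriting: the genuine
    // return PC is saved to globalData->exceptionLocation, and the address of
    // ctiVMThrowTrampoline is planted as the return address instead, so the
    // ret() above "returns" into the throw machinery rather than the caller.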

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up the calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
#endif

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
    trampolines->ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
#else
    UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    trampolines->ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
#else
    UNUSED_PARAM(ctiVirtualCallLink);
#endif
#if ENABLE(JIT_OPTIMIZE_MOD)
    trampolines->ctiSoftModulo = trampolineAt(finalCode, softModBegin);
#endif
}

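// A note on the emit_op_* helpers that follow (a convention inferred from
// their bodies, not stated in this file): each receives a pointer to the
// current bytecode instruction, and currentInstruction[1], [2], ... are that
// opcode's operands, usually the destination virtual register followed by
// the sources.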
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}
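// The map() call above appears to record that, at the next bytecode index,
// regT1/regT0 still mirror dst's tag and payload, letting a following opcode
// reuse the registers instead of reloading the virtual register.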

void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target);
}
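// Note the flipped comparison in the constant-op1 case of
// emit_op_loop_if_lesseq above: "op1 <= op2" is tested as "op2 >= op1"
// (GreaterThanOrEqual), because the register holds op2 while op1 is folded
// into the immediate.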

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that value, baseVal, and proto are cells.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(baseVal);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(Imm32(JSValue::TrueTag), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);
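    // The NonZero test above doubles as the null check: in this encoding a
    // null prototype appears to have a zero payload, so a zero falls through
    // and ends the walk with the result set to false below.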

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(Imm32(JSValue::FalseTag), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[2].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[3].u.operand;

    loadPtr(&globalObject->d()->registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[1].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    loadPtr(&globalObject->d()->registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Verify structure.
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))));

    // Load property.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
    load32(offsetAddr, regT3);
    load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_resolve_global), dst, regT1, regT0);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

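    // The xor/test/xor sequence below appears to rely on the tag encoding:
    // FalseTag and TrueTag differ only in the low bit, so xor'ing a boolean's
    // tag with FalseTag leaves at most bit 0 set (the ~1 mask sends anything
    // else to the slow case), and the second xor then yields the tag of the
    // negated boolean.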
    xor32(Imm32(JSValue::FalseTag), regT0);
    addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
    xor32(Imm32(JSValue::TrueTag), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isTrue = branch32(Equal, regT1, Imm32(JSValue::TrueTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::FalseTag)), target);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isTrue2 = branch32(NotEqual, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleEqualOrUnordered, fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isTrue.link(this);
    isTrue2.link(this);
}
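// Dispatch order in emit_op_jfalse above (mirrored by emit_op_jtrue below):
// booleans branch on the tag alone, int32s branch on a zero payload, and
// anything whose tag is at or below LowestTag is treated as a double and
// compared against 0.0 (DoubleEqualOrUnordered also catches NaN, which is
// falsy); every other tag takes the slow path.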

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleNotEqual, fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isFalse.link(this);
    isFalse2.link(this);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(NonZero, regT1), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(Zero, regT1), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

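    // set8 below writes a bare 0 or 1; or'ing in FalseTag then yields
    // FalseTag for 0 and TrueTag for 1 (this assumes TrueTag == FalseTag | 1,
    // which the tag values are evidently chosen to satisfy).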
    set8(Equal, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(NotEqual, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(Imm32(0x1), regT0);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));

    if (type == OpStrictEq)
        set8(Equal, regT0, regT1, regT0);
    else
        set8(NotEqual, regT0, regT1, regT0);

    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
    set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister()) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
        isNotObject.append(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(Imm32(0), addressFor(i));
    store32(regT3, addressFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, Imm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, Imm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(addressFor(i), regT0);
    Jump end = branch32(Equal, regT0, addressFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(Imm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(Imm32(1), regT0);
    store32(regT0, addressFor(i));

    // Verify that i is valid:
    loadPtr(addressFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));

    // Test base's prototype chain
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain)), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), Imm32(JSValue::NullTag)));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(Imm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
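    // The loop above validates the cached prototype chain: regT3 walks the
    // saved Structure* vector while regT2 walks the live prototype chain, and
    // any mismatch (or a null prototype reached before the vector's
    // terminating zero) falls back to cti_has_property below.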
1252 
1253     // Continue loop.
1254     addJump(jump(), target);
1255 
1256     // Slow case: Ask the object if i is valid.
1257     callHasProperty.link(this);
1258     loadPtr(addressFor(dst), regT1);
1259     JITStubCall stubCall(this, cti_has_property);
1260     stubCall.addArgument(regT0);
1261     stubCall.addArgument(regT1);
1262     stubCall.call();
1263 
1264     // Test for valid key.
1265     addJump(branchTest32(NonZero, regT0), target);
1266     jump().linkTo(begin, this);
1267 
1268     // End of loop.
1269     end.link(this);
1270 }
1271 
emit_op_push_scope(Instruction * currentInstruction)1272 void JIT::emit_op_push_scope(Instruction* currentInstruction)
1273 {
1274     JITStubCall stubCall(this, cti_op_push_scope);
1275     stubCall.addArgument(currentInstruction[1].u.operand);
1276     stubCall.call(currentInstruction[1].u.operand);
1277 }
1278 
emit_op_pop_scope(Instruction *)1279 void JIT::emit_op_pop_scope(Instruction*)
1280 {
1281     JITStubCall(this, cti_op_pop_scope).call();
1282 }
1283 
emit_op_to_jsnumber(Instruction * currentInstruction)1284 void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
1285 {
1286     int dst = currentInstruction[1].u.operand;
1287     int src = currentInstruction[2].u.operand;
1288 
1289     emitLoad(src, regT1, regT0);
1290 
1291     Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
1292     addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::EmptyValueTag)));
1293     isInt32.link(this);
1294 
1295     if (src != dst)
1296         emitStore(dst, regT1, regT0);
1297     map(m_bytecodeIndex + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
1298 }
1299 
1300 void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1301 {
1302     int dst = currentInstruction[1].u.operand;
1303 
1304     linkSlowCase(iter);
1305 
1306     JITStubCall stubCall(this, cti_op_to_jsnumber);
1307     stubCall.addArgument(regT1, regT0);
1308     stubCall.call(dst);
1309 }
1310 
1311 void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
1312 {
1313     JITStubCall stubCall(this, cti_op_push_new_scope);
1314     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
1315     stubCall.addArgument(currentInstruction[3].u.operand);
1316     stubCall.call(currentInstruction[1].u.operand);
1317 }
1318 
1319 void JIT::emit_op_catch(Instruction* currentInstruction)
1320 {
1321     unsigned exception = currentInstruction[1].u.operand;
1322 
1323     // This opcode only executes after a return from cti_op_throw.
1324 
1325     // cti_op_throw may have taken us to a call frame further up the stack; reload
1326     // the call frame pointer to adjust.
1327     peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
1328 
1329     // Now store the exception returned by cti_op_throw.
1330     emitStore(exception, regT1, regT0);
1331     map(m_bytecodeIndex + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
1332 }
1333 
1334 void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
1335 {
1336     JITStubCall stubCall(this, cti_op_jmp_scopes);
1337     stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1338     stubCall.call();
1339     addJump(jump(), currentInstruction[2].u.operand);
1340 }
1341 
1342 void JIT::emit_op_switch_imm(Instruction* currentInstruction)
1343 {
1344     unsigned tableIndex = currentInstruction[1].u.operand;
1345     unsigned defaultOffset = currentInstruction[2].u.operand;
1346     unsigned scrutinee = currentInstruction[3].u.operand;
1347 
1348     // create jump table for switch destinations, track this switch statement.
1349     SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
1350     m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
1351     jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1352 
1353     JITStubCall stubCall(this, cti_op_switch_imm);
1354     stubCall.addArgument(scrutinee);
1355     stubCall.addArgument(Imm32(tableIndex));
1356     stubCall.call();
1357     jump(regT0);
1358 }
1359 
1360 void JIT::emit_op_switch_char(Instruction* currentInstruction)
1361 {
1362     unsigned tableIndex = currentInstruction[1].u.operand;
1363     unsigned defaultOffset = currentInstruction[2].u.operand;
1364     unsigned scrutinee = currentInstruction[3].u.operand;
1365 
1366     // create jump table for switch destinations, track this switch statement.
1367     SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
1368     m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
1369     jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1370 
1371     JITStubCall stubCall(this, cti_op_switch_char);
1372     stubCall.addArgument(scrutinee);
1373     stubCall.addArgument(Imm32(tableIndex));
1374     stubCall.call();
1375     jump(regT0);
1376 }
1377 
1378 void JIT::emit_op_switch_string(Instruction* currentInstruction)
1379 {
1380     unsigned tableIndex = currentInstruction[1].u.operand;
1381     unsigned defaultOffset = currentInstruction[2].u.operand;
1382     unsigned scrutinee = currentInstruction[3].u.operand;
1383 
1384     // create jump table for switch destinations, track this switch statement.
1385     StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
1386     m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
1387 
1388     JITStubCall stubCall(this, cti_op_switch_string);
1389     stubCall.addArgument(scrutinee);
1390     stubCall.addArgument(Imm32(tableIndex));
1391     stubCall.call();
1392     jump(regT0);
1393 }
1394 
1395 void JIT::emit_op_new_error(Instruction* currentInstruction)
1396 {
1397     unsigned dst = currentInstruction[1].u.operand;
1398     unsigned type = currentInstruction[2].u.operand;
1399     unsigned message = currentInstruction[3].u.operand;
1400 
1401     JITStubCall stubCall(this, cti_op_new_error);
1402     stubCall.addArgument(Imm32(type));
1403     stubCall.addArgument(m_codeBlock->getConstant(message));
1404     stubCall.addArgument(Imm32(m_bytecodeIndex));
1405     stubCall.call(dst);
1406 }
1407 
1408 void JIT::emit_op_debug(Instruction* currentInstruction)
1409 {
1410     JITStubCall stubCall(this, cti_op_debug);
1411     stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1412     stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
1413     stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
1414     stubCall.call();
1415 }
1416 
1417 
1418 void JIT::emit_op_enter(Instruction*)
1419 {
1420     // Even though JIT code doesn't use them, we initialize our constant
1421     // registers to zap stale pointers, to avoid unnecessarily prolonging
1422     // object lifetime and increasing GC pressure.
1423     for (int i = 0; i < m_codeBlock->m_numVars; ++i)
1424         emitStore(i, jsUndefined());
1425 }
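// For example, with m_codeBlock->m_numVars == 3 the loop above emits just
// three undefined stores - the moral equivalent of r0 = r1 = r2 =
// jsUndefined() - so JSValues left behind by an earlier frame can no longer
// keep their referents alive across a collection.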
1426 
1427 void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
1428 {
1429     emit_op_enter(currentInstruction);
1430 
1431     JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
1432 }
1433 
1434 void JIT::emit_op_create_arguments(Instruction*)
1435 {
1436     Jump argsCreated = branch32(NotEqual, tagFor(RegisterFile::ArgumentsRegister, callFrameRegister), Imm32(JSValue::EmptyValueTag));
1437 
1438     // If we get here the arguments register holds the empty value - i.e. the arguments object needs lazy creation.
1439     if (m_codeBlock->m_numParameters == 1)
1440         JITStubCall(this, cti_op_create_arguments_no_params).call();
1441     else
1442         JITStubCall(this, cti_op_create_arguments).call();
1443 
1444     argsCreated.link(this);
1445 }
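// Sketch of the logic above (pseudo-C++, illustrative only): the arguments
// register doubles as a created-flag, with the empty value meaning "not
// created yet".
//
//     if (callFrame[ArgumentsRegister].tag == EmptyValueTag)       // lazy case
//         callFrame[ArgumentsRegister] = (m_numParameters == 1)    // 1 == 'this' only
//             ? cti_op_create_arguments_no_params()
//             : cti_op_create_arguments();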
1446 
1447 void JIT::emit_op_init_arguments(Instruction*)
1448 {
1449     emitStore(RegisterFile::ArgumentsRegister, JSValue(), callFrameRegister);
1450 }
1451 
1452 void JIT::emit_op_convert_this(Instruction* currentInstruction)
1453 {
1454     unsigned thisRegister = currentInstruction[1].u.operand;
1455 
1456     emitLoad(thisRegister, regT1, regT0);
1457 
1458     addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
1459 
1460     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
1461     addSlowCase(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
1462 
1463     map(m_bytecodeIndex + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
1464 }
1465 
1466 void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1467 {
1468     unsigned thisRegister = currentInstruction[1].u.operand;
1469 
1470     linkSlowCase(iter);
1471     linkSlowCase(iter);
1472 
1473     JITStubCall stubCall(this, cti_op_convert_this);
1474     stubCall.addArgument(regT1, regT0);
1475     stubCall.call(thisRegister);
1476 }
1477 
1478 void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
1479 {
1480     peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
1481     Jump noProfiler = branchTestPtr(Zero, Address(regT2));
1482 
1483     JITStubCall stubCall(this, cti_op_profile_will_call);
1484     stubCall.addArgument(currentInstruction[1].u.operand);
1485     stubCall.call();
1486     noProfiler.link(this);
1487 }
1488 
1489 void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1490 {
1491     peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
1492     Jump noProfiler = branchTestPtr(Zero, Address(regT2));
1493 
1494     JITStubCall stubCall(this, cti_op_profile_did_call);
1495     stubCall.addArgument(currentInstruction[1].u.operand);
1496     stubCall.call();
1497     noProfiler.link(this);
1498 }
1499 
1500 #else // USE(JSVALUE32_64)
1501 
1502 #define RECORD_JUMP_TARGET(targetOffset) \
1503    do { m_labels[m_bytecodeIndex + (targetOffset)].used(); } while (false)
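// RECORD_JUMP_TARGET marks the per-bytecode-offset label slot as used; jumps
// recorded with addJump() are linked against these labels after the main
// compilation pass. The mark appears to exist so that only offsets that are
// genuinely jump targets need live labels (the JSVALUE32_64 build above gets
// by without this bookkeeping).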
1504 
1505 void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure *trampolines)
1506 {
1507 #if ENABLE(JIT_OPTIMIZE_MOD)
1508     Label softModBegin = align();
1509     softModulo();
1510 #endif
1511 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1512     // (2) The second function provides fast property access for string length
1513     Label stringLengthBegin = align();
1514 
1515     // Check eax is a string
1516     Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
1517     Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));
1518 
1519     // Checks out okay! - get the length from the UString.
1520     load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_stringLength)), regT0);
1521 
1522     Jump string_failureCases3 = branch32(Above, regT0, Imm32(JSImmediate::maxImmediateInt));
1523 
1524     // regT0 contains a 64-bit value (positive, zero-extended), so we don't need to sign-extend here.
1525     emitFastArithIntToImmNoCheck(regT0, regT0);
1526 
1527     ret();
1528 #endif
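// Taken together, the trampoline above is the inline fast path for reading
// 'length' off a string. In pseudo-C++ (illustrative only; helper names are
// hypothetical):
//
//     if (!isCell(v) || vptr(v) != jsStringVPtr) goto fail;  // not a JSString
//     int32_t len = asString(v)->m_stringLength;
//     if (len > JSImmediate::maxImmediateInt) goto fail;     // too big for an immediate
//     return immediateInt(len);
//
// The three failure jumps become tail calls to cti_op_get_by_id_string_fail,
// linked near the end of this function.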
1529 
1530     // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
1531     COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);
1532 
1533     // VirtualCallLink Trampoline
1534     // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
1535     Label virtualCallLinkBegin = align();
1536     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1537 
1538     Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
1539 
1540     Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
1541     preserveReturnAddressAfterCall(regT3);
1542     restoreArgumentReference();
1543     Call callJSFunction2 = call();
1544     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1545     emitGetJITStubArg(2, regT1); // argCount
1546     restoreReturnAddressBeforeReturn(regT3);
1547     hasCodeBlock2.link(this);
1548 
1549     // Check argCount matches callee arity.
1550     Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
1551     preserveReturnAddressAfterCall(regT3);
1552     emitPutJITStubArg(regT3, 1); // return address
1553     restoreArgumentReference();
1554     Call callArityCheck2 = call();
1555     move(regT1, callFrameRegister);
1556     emitGetJITStubArg(2, regT1); // argCount
1557     restoreReturnAddressBeforeReturn(regT3);
1558     arityCheckOkay2.link(this);
1559 
1560     isNativeFunc2.link(this);
1561 
1562     compileOpCallInitializeCallFrame();
1563     preserveReturnAddressAfterCall(regT3);
1564     emitPutJITStubArg(regT3, 1); // return address
1565     restoreArgumentReference();
1566     Call callLazyLinkCall = call();
1567     restoreReturnAddressBeforeReturn(regT3);
1568     jump(regT0);
1569 
1570     // VirtualCall Trampoline
1571     // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
1572     Label virtualCallBegin = align();
1573     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1574 
1575     Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
1576 
1577     Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
1578     preserveReturnAddressAfterCall(regT3);
1579     restoreArgumentReference();
1580     Call callJSFunction1 = call();
1581     emitGetJITStubArg(2, regT1); // argCount
1582     restoreReturnAddressBeforeReturn(regT3);
1583     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1584     hasCodeBlock3.link(this);
1585 
1586     // Check argCount matches callee arity.
1587     Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
1588     preserveReturnAddressAfterCall(regT3);
1589     emitPutJITStubArg(regT3, 1); // return address
1590     restoreArgumentReference();
1591     Call callArityCheck1 = call();
1592     move(regT1, callFrameRegister);
1593     emitGetJITStubArg(2, regT1); // argCount
1594     restoreReturnAddressBeforeReturn(regT3);
1595     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1596     arityCheckOkay3.link(this);
1597 
1598     isNativeFunc3.link(this);
1599 
1600     compileOpCallInitializeCallFrame();
1601     loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
1602     jump(regT0);
1603 
1604     Label nativeCallThunk = align();
1605     preserveReturnAddressAfterCall(regT0);
1606     emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address
1607 
1608     // Load caller frame's scope chain into this callframe so that whatever we call can
1609     // get to its global data.
1610     emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
1611     emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
1612     emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
1613 
1614 
1615 #if CPU(X86_64)
1616     emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, X86Registers::ecx);
1617 
1618     // Allocate stack space for our arglist
1619     subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
1620     COMPILE_ASSERT((sizeof(ArgList) & 0xf) == 0, ArgList_should_be_16byte_aligned);
1621 
1622     // Set up arguments
1623     subPtr(Imm32(1), X86Registers::ecx); // Don't include 'this' in argcount
1624 
1625     // Push argcount
1626     storePtr(X86Registers::ecx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));
1627 
1628     // Calculate the start of the callframe header, and store in edx
1629     addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), callFrameRegister, X86Registers::edx);
1630 
1631     // Calculate start of arguments as callframe header - sizeof(Register) * argcount (ecx)
1632     mul32(Imm32(sizeof(Register)), X86Registers::ecx, X86Registers::ecx);
1633     subPtr(X86Registers::ecx, X86Registers::edx);
1634 
1635     // push pointer to arguments
1636     storePtr(X86Registers::edx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));
1637 
1638     // ArgList is passed by reference; it sits at the stack pointer, so pass that
1639     move(stackPointerRegister, X86Registers::ecx);
1640 
1641     // edx currently points to the first argument, edx-sizeof(Register) points to 'this'
1642     loadPtr(Address(X86Registers::edx, -(int32_t)sizeof(Register)), X86Registers::edx);
1643 
1644     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
1645 
1646     move(callFrameRegister, X86Registers::edi);
1647 
1648     call(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_data)));
1649 
1650     addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
1651 #elif CPU(X86)
1652     emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1653 
1654     /* We use two structs to describe the stack frame we set up for our
1655      * call to native code.  NativeCallFrameStructure describes how we set up the
1656      * stack in advance of the call.  NativeFunctionCalleeSignature describes the
1657      * call frame as the native code expects it.  We need both because we use the
1658      * fastcall calling convention, which has the callee pop its arguments - but
1659      * not the rest of the call frame - off the stack, so we need a reliable way
1660      * to ensure we increment the stack pointer by the right amount after the call.
1661      */
1662 #if COMPILER(MSVC) || OS(LINUX)
1663     struct NativeCallFrameStructure {
1664       //  CallFrame* callFrame; // passed in EDX
1665         JSObject* callee;
1666         JSValue thisValue;
1667         ArgList* argPointer;
1668         ArgList args;
1669         JSValue result;
1670     };
1671     struct NativeFunctionCalleeSignature {
1672         JSObject* callee;
1673         JSValue thisValue;
1674         ArgList* argPointer;
1675     };
1676 #else
1677     struct NativeCallFrameStructure {
1678       //  CallFrame* callFrame; // passed in ECX
1679       //  JSObject* callee; // passed in EDX
1680         JSValue thisValue;
1681         ArgList* argPointer;
1682         ArgList args;
1683     };
1684     struct NativeFunctionCalleeSignature {
1685         JSValue thisValue;
1686         ArgList* argPointer;
1687     };
1688 #endif
1689     const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
1690     // Allocate system stack frame
1691     subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);
1692 
1693     // Set up arguments
1694     subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
1695 
1696     // push argcount
1697     storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));
1698 
1699     // Calculate the start of the callframe header, and store in regT1
1700     addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);
1701 
1702     // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
1703     mul32(Imm32(sizeof(Register)), regT0, regT0);
1704     subPtr(regT0, regT1);
1705     storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));
1706 
1707     // ArgList is passed by reference; store the address of the 'args' member through 'argPointer'
1708     addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
1709     storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));
1710 
1711     // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
1712     loadPtr(Address(regT1, -(int)sizeof(Register)), regT1);
1713     storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue)));
1714 
1715 #if COMPILER(MSVC) || OS(LINUX)
1716     // JSValue is returned through a hidden pointer argument; materialize the address of 'result' in ecx
1717     addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);
1718 
1719     // Plant callee
1720     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
1721     storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));
1722 
1723     // Plant callframe
1724     move(callFrameRegister, X86Registers::edx);
1725 
1726     call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));
1727 
1728     // JSValue is a non-POD type
1729     loadPtr(Address(X86Registers::eax), X86Registers::eax);
1730 #else
1731     // Plant callee
1732     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx);
1733 
1734     // Plant callframe
1735     move(callFrameRegister, X86Registers::ecx);
1736     call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
1737 #endif
1738 
1739     // We've put a few temporaries on the stack in addition to the actual arguments
1740     // so pull them off now
1741     addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);
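    // Why this exact adjustment: under fastcall the callee pops its declared
    // arguments - sizeof(NativeFunctionCalleeSignature) bytes - but we
    // allocated NativeCallFrameSize bytes up front, so the difference is what
    // remains for us to pop here (see the block comment above the struct
    // definitions).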
1742 
1743 #elif CPU(ARM)
1744     emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1745 
1746     // Allocate stack space for our arglist
1747     COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0, ArgList_should_be_8byte_aligned);
1748     subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
1749 
1750     // Set up arguments
1751     subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
1752 
1753     // Push argcount
1754     storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));
1755 
1756     // Calculate the start of the callframe header, and store in regT1
1757     move(callFrameRegister, regT1);
1758     sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);
1759 
1760     // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
1761     mul32(Imm32(sizeof(Register)), regT0, regT0);
1762     subPtr(regT0, regT1);
1763 
1764     // push pointer to arguments
1765     storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));
1766 
1767     // Setup arg3: regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
1768     loadPtr(Address(regT1, -(int32_t)sizeof(Register)), regT2);
1769 
1770     // Setup arg2:
1771     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
1772 
1773     // Setup arg1:
1774     move(callFrameRegister, regT0);
1775 
1776     // Setup arg4: the ArgList sits at the stack pointer; passing that in r3 is admittedly a hack
1777     move(stackPointerRegister, ARMRegisters::r3);
1778 
1779     call(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_data)));
1780 
1781     addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
1782 
1783 #elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
1784 #error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
1785 #else
1786     breakpoint();
1787 #endif
1788 
1789     // Check for an exception
1790     loadPtr(&(globalData->exception), regT2);
1791     Jump exceptionHandler = branchTestPtr(NonZero, regT2);
1792 
1793     // Grab the return address.
1794     emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
1795 
1796     // Restore our caller's "r".
1797     emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
1798 
1799     // Return.
1800     restoreReturnAddressBeforeReturn(regT1);
1801     ret();
1802 
1803     // Handle an exception
1804     exceptionHandler.link(this);
1805     // Grab the return address.
1806     emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
1807     move(ImmPtr(&globalData->exceptionLocation), regT2);
1808     storePtr(regT1, regT2);
1809     move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT2);
1810     emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
1811     poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
1812     restoreReturnAddressBeforeReturn(regT2);
1813     ret();
1814 
1815 
1816 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1817     Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
1818     Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
1819     Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
1820 #endif
1821 
1822     // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
1823     LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));
1824 
1825 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1826     patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
1827     patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
1828     patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
1829 #endif
1830     patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
1831     patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
1832 #if ENABLE(JIT_OPTIMIZE_CALL)
1833     patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
1834     patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
1835     patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
1836 #endif
1837 
1838     CodeRef finalCode = patchBuffer.finalizeCode();
1839     *executablePool = finalCode.m_executablePool;
1840 
1841     trampolines->ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
1842     trampolines->ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
1843     trampolines->ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
1844 #if ENABLE(JIT_OPTIMIZE_MOD)
1845     trampolines->ctiSoftModulo = trampolineAt(finalCode, softModBegin);
1846 #endif
1847 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1848     trampolines->ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
1849 #else
1850     UNUSED_PARAM(ctiStringLengthTrampoline);
1851 #endif
1852 }
1853 
1854 void JIT::emit_op_mov(Instruction* currentInstruction)
1855 {
1856     int dst = currentInstruction[1].u.operand;
1857     int src = currentInstruction[2].u.operand;
1858 
1859     if (m_codeBlock->isConstantRegisterIndex(src)) {
1860         storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
1861         if (dst == m_lastResultBytecodeRegister)
1862             killLastResultRegister();
1863     } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
1864     // If either the src or dst is the cached register go through
1865     // get/put registers to make sure we track this correctly.
1866         emitGetVirtualRegister(src, regT0);
1867         emitPutVirtualRegister(dst);
1868     } else {
1869         // Perform the copy via regT1; do not disturb any mapping in regT0.
1870         loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
1871         storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
1872     }
1873 }
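// m_lastResultBytecodeRegister is a one-entry cache naming the virtual
// register whose value is currently live in regT0. The three cases above, in
// sketch form (illustrative pseudo-C++ only):
//
//     if (isConstant(src))       frame[dst] = constant;                      // may kill the cache
//     else if (cache involved)   { regT0 = frame[src]; frame[dst] = regT0; } // keeps the cache coherent
//     else                       frame[dst] = frame[src];                    // via regT1, cache untouched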
1874 
1875 void JIT::emit_op_end(Instruction* currentInstruction)
1876 {
1877     if (m_codeBlock->needsFullScopeChain())
1878         JITStubCall(this, cti_op_end).call();
1879     ASSERT(returnValueRegister != callFrameRegister);
1880     emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
1881     restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
1882     ret();
1883 }
1884 
1885 void JIT::emit_op_jmp(Instruction* currentInstruction)
1886 {
1887     unsigned target = currentInstruction[1].u.operand;
1888     addJump(jump(), target);
1889     RECORD_JUMP_TARGET(target);
1890 }
1891 
1892 void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
1893 {
1894     emitTimeoutCheck();
1895 
1896     unsigned op1 = currentInstruction[1].u.operand;
1897     unsigned op2 = currentInstruction[2].u.operand;
1898     unsigned target = currentInstruction[3].u.operand;
1899     if (isOperandConstantImmediateInt(op2)) {
1900         emitGetVirtualRegister(op1, regT0);
1901         emitJumpSlowCaseIfNotImmediateInteger(regT0);
1902 #if USE(JSVALUE64)
1903         int32_t op2imm = getConstantOperandImmediateInt(op2);
1904 #else
1905         int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
1906 #endif
1907         addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target);
1908     } else {
1909         emitGetVirtualRegisters(op1, regT0, op2, regT1);
1910         emitJumpSlowCaseIfNotImmediateInteger(regT0);
1911         emitJumpSlowCaseIfNotImmediateInteger(regT1);
1912         addJump(branch32(LessThanOrEqual, regT0, regT1), target);
1913     }
1914 }
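// Being a loop back-edge, this opcode starts with emitTimeoutCheck() so that
// long-running loops can be interrupted. When the right-hand operand is a
// constant integer the compare folds to a single branch32 against an
// immediate; only non-integer operands fall through to the slow case.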
1915 
1916 void JIT::emit_op_new_object(Instruction* currentInstruction)
1917 {
1918     JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
1919 }
1920 
1921 void JIT::emit_op_instanceof(Instruction* currentInstruction)
1922 {
1923     unsigned dst = currentInstruction[1].u.operand;
1924     unsigned value = currentInstruction[2].u.operand;
1925     unsigned baseVal = currentInstruction[3].u.operand;
1926     unsigned proto = currentInstruction[4].u.operand;
1927 
1928     // Load the operands (baseVal, proto, and value respectively) into registers.
1929     // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
1930     emitGetVirtualRegister(value, regT2);
1931     emitGetVirtualRegister(baseVal, regT0);
1932     emitGetVirtualRegister(proto, regT1);
1933 
1934     // Check that baseVal & proto are cells.
1935     emitJumpSlowCaseIfNotJSCell(regT2, value);
1936     emitJumpSlowCaseIfNotJSCell(regT0, baseVal);
1937     emitJumpSlowCaseIfNotJSCell(regT1, proto);
1938 
1939     // Check that baseVal 'ImplementsDefaultHasInstance'.
1940     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
1941     addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));
1942 
1943     // Optimistically load the result true, and start looping.
1944     // Initially, regT1 still contains proto and regT2 still contains value.
1945     // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
1946     move(ImmPtr(JSValue::encode(jsBoolean(true))), regT0);
1947     Label loop(this);
1948 
1949     // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
1950     // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
1951     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
1952     loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
1953     Jump isInstance = branchPtr(Equal, regT2, regT1);
1954     emitJumpIfJSCell(regT2).linkTo(loop, this);
1955 
1956     // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
1957     move(ImmPtr(JSValue::encode(jsBoolean(false))), regT0);
1958 
1959     // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
1960     isInstance.link(this);
1961     emitPutVirtualRegister(dst);
1962 }
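// The loop above walks value's prototype chain entirely in registers; in
// pseudo-C++ (illustrative only):
//
//     result = true;
//     do {
//         value = value->m_structure->m_prototype;   // an arbitrary JSValue
//         if (value == proto)
//             goto done;                             // hit: result stays true
//     } while (isCell(value));
//     result = false;                                // walked off the chain
//     done: frame[dst] = jsBoolean(result);
//
// The up-front checks ensure all three operands are cells and that baseVal
// ImplementsDefaultHasInstance; anything else takes the slow case.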
1963 
1964 void JIT::emit_op_new_func(Instruction* currentInstruction)
1965 {
1966     JITStubCall stubCall(this, cti_op_new_func);
1967     stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
1968     stubCall.call(currentInstruction[1].u.operand);
1969 }
1970 
1971 void JIT::emit_op_call(Instruction* currentInstruction)
1972 {
1973     compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
1974 }
1975 
1976 void JIT::emit_op_call_eval(Instruction* currentInstruction)
1977 {
1978     compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
1979 }
1980 
1981 void JIT::emit_op_load_varargs(Instruction* currentInstruction)
1982 {
1983     int argCountDst = currentInstruction[1].u.operand;
1984     int argsOffset = currentInstruction[2].u.operand;
1985 
1986     JITStubCall stubCall(this, cti_op_load_varargs);
1987     stubCall.addArgument(Imm32(argsOffset));
1988     stubCall.call();
1989     // Stores a naked int32 in the register file.
1990     store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
1991 }
1992 
1993 void JIT::emit_op_call_varargs(Instruction* currentInstruction)
1994 {
1995     compileOpCallVarargs(currentInstruction);
1996 }
1997 
1998 void JIT::emit_op_construct(Instruction* currentInstruction)
1999 {
2000     compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
2001 }
2002 
2003 void JIT::emit_op_get_global_var(Instruction* currentInstruction)
2004 {
2005     JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
2006     move(ImmPtr(globalObject), regT0);
2007     emitGetVariableObjectRegister(regT0, currentInstruction[3].u.operand, regT0);
2008     emitPutVirtualRegister(currentInstruction[1].u.operand);
2009 }
2010 
2011 void JIT::emit_op_put_global_var(Instruction* currentInstruction)
2012 {
2013     emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
2014     JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
2015     move(ImmPtr(globalObject), regT0);
2016     emitPutVariableObjectRegister(regT1, regT0, currentInstruction[2].u.operand);
2017 }
2018 
2019 void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
2020 {
2021     int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();
2022 
2023     emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
2024     while (skip--)
2025         loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
2026 
2027     loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
2028     emitGetVariableObjectRegister(regT0, currentInstruction[2].u.operand, regT0);
2029     emitPutVirtualRegister(currentInstruction[1].u.operand);
2030 }
2031 
2032 void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
2033 {
2034     int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
2035 
2036     emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
2037     emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
2038     while (skip--)
2039         loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
2040 
2041     loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
2042     emitPutVariableObjectRegister(regT0, regT1, currentInstruction[1].u.operand);
2043 }
2044 
2045 void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
2046 {
2047     JITStubCall stubCall(this, cti_op_tear_off_activation);
2048     stubCall.addArgument(currentInstruction[1].u.operand, regT2);
2049     stubCall.call();
2050 }
2051 
2052 void JIT::emit_op_tear_off_arguments(Instruction*)
2053 {
2054     JITStubCall(this, cti_op_tear_off_arguments).call();
2055 }
2056 
2057 void JIT::emit_op_ret(Instruction* currentInstruction)
2058 {
2059     // We could JIT generate the deref, only calling out to C when the refcount hits zero.
2060     if (m_codeBlock->needsFullScopeChain())
2061         JITStubCall(this, cti_op_ret_scopeChain).call();
2062 
2063     ASSERT(callFrameRegister != regT1);
2064     ASSERT(regT1 != returnValueRegister);
2065     ASSERT(returnValueRegister != callFrameRegister);
2066 
2067     // Return the result in %eax.
2068     emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
2069 
2070     // Grab the return address.
2071     emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
2072 
2073     // Restore our caller's "r".
2074     emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
2075 
2076     // Return.
2077     restoreReturnAddressBeforeReturn(regT1);
2078     ret();
2079 }
2080 
2081 void JIT::emit_op_new_array(Instruction* currentInstruction)
2082 {
2083     JITStubCall stubCall(this, cti_op_new_array);
2084     stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
2085     stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
2086     stubCall.call(currentInstruction[1].u.operand);
2087 }
2088 
2089 void JIT::emit_op_resolve(Instruction* currentInstruction)
2090 {
2091     JITStubCall stubCall(this, cti_op_resolve);
2092     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
2093     stubCall.call(currentInstruction[1].u.operand);
2094 }
2095 
2096 void JIT::emit_op_construct_verify(Instruction* currentInstruction)
2097 {
2098     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
2099 
2100     emitJumpSlowCaseIfNotJSCell(regT0);
2101     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2102     addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
2103 
2104 }
2105 
2106 void JIT::emit_op_to_primitive(Instruction* currentInstruction)
2107 {
2108     int dst = currentInstruction[1].u.operand;
2109     int src = currentInstruction[2].u.operand;
2110 
2111     emitGetVirtualRegister(src, regT0);
2112 
2113     Jump isImm = emitJumpIfNotJSCell(regT0);
2114     addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
2115     isImm.link(this);
2116 
2117     if (dst != src)
2118         emitPutVirtualRegister(dst);
2119 
2120 }
2121 
2122 void JIT::emit_op_strcat(Instruction* currentInstruction)
2123 {
2124     JITStubCall stubCall(this, cti_op_strcat);
2125     stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
2126     stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
2127     stubCall.call(currentInstruction[1].u.operand);
2128 }
2129 
2130 void JIT::emit_op_resolve_base(Instruction* currentInstruction)
2131 {
2132     JITStubCall stubCall(this, cti_op_resolve_base);
2133     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
2134     stubCall.call(currentInstruction[1].u.operand);
2135 }
2136 
2137 void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
2138 {
2139     JITStubCall stubCall(this, cti_op_resolve_skip);
2140     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
2141     stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
2142     stubCall.call(currentInstruction[1].u.operand);
2143 }
2144 
2145 void JIT::emit_op_resolve_global(Instruction* currentInstruction)
2146 {
2147     // Fast case
2148     void* globalObject = currentInstruction[2].u.jsCell;
2149     Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);
2150 
2151     unsigned currentIndex = m_globalResolveInfoIndex++;
2152     void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
2153     void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
2154 
2155     // Check Structure of global object
2156     move(ImmPtr(globalObject), regT0);
2157     loadPtr(structureAddress, regT1);
2158     Jump noMatch = branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))); // Structures don't match
2159 
2160     // Load cached property
2161     // Assume that the global object always uses external storage.
2162     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT0);
2163     load32(offsetAddr, regT1);
2164     loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
2165     emitPutVirtualRegister(currentInstruction[1].u.operand);
2166     Jump end = jump();
2167 
2168     // Slow case
2169     noMatch.link(this);
2170     JITStubCall stubCall(this, cti_op_resolve_global);
2171     stubCall.addArgument(ImmPtr(globalObject));
2172     stubCall.addArgument(ImmPtr(ident));
2173     stubCall.addArgument(Imm32(currentIndex));
2174     stubCall.call(currentInstruction[1].u.operand);
2175     end.link(this);
2176 }
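// This is a per-call-site inline cache: globalResolveInfo(currentIndex) holds
// the global object's Structure and the property's storage offset as observed
// on the last miss. The fast path, in sketch form (pseudo-C++, illustrative
// only):
//
//     if (globalObject->m_structure == cached.structure)           // cache hit
//         dst = globalObject->m_externalStorage[cached.offset];
//     else
//         dst = cti_op_resolve_global(...);  // presumably refills the cache
//
// The fast path only ever reads the cache; refilling it is left to the stub.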
2177 
2178 void JIT::emit_op_not(Instruction* currentInstruction)
2179 {
2180     emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
2181     xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
2182     addSlowCase(branchTestPtr(NonZero, regT0, Imm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
2183     xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), regT0);
2184     emitPutVirtualRegister(currentInstruction[1].u.operand);
2185 }
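// The xor trick above exploits the immediate encoding of booleans, which
// differ only in ExtendedPayloadBitBoolValue. The first xor strips
// FullTagTypeBool; the branch sends any value with bits set outside the
// payload bit (i.e. anything that wasn't a boolean) to the slow case; and the
// final xor restores the tag and flips the payload bit in one go, so the fast
// path negates a boolean without branching on the value itself.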
2186 
2187 void JIT::emit_op_jfalse(Instruction* currentInstruction)
2188 {
2189     unsigned target = currentInstruction[2].u.operand;
2190     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
2191 
2192     addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0)))), target);
2193     Jump isNonZero = emitJumpIfImmediateInteger(regT0);
2194 
2195     addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))), target);
2196     addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))));
2197 
2198     isNonZero.link(this);
2199     RECORD_JUMP_TARGET(target);
2200 }
2201 void JIT::emit_op_jeq_null(Instruction* currentInstruction)
2202 {
2203     unsigned src = currentInstruction[1].u.operand;
2204     unsigned target = currentInstruction[2].u.operand;
2205 
2206     emitGetVirtualRegister(src, regT0);
2207     Jump isImmediate = emitJumpIfNotJSCell(regT0);
2208 
2209     // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
2210     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2211     addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
2212     Jump wasNotImmediate = jump();
2213 
2214     // Now handle the immediate cases - undefined & null
2215     isImmediate.link(this);
2216     andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
2217     addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNull()))), target);
2218 
2219     wasNotImmediate.link(this);
2220     RECORD_JUMP_TARGET(target);
2221 }
2222 void JIT::emit_op_jneq_null(Instruction* currentInstruction)
2223 {
2224     unsigned src = currentInstruction[1].u.operand;
2225     unsigned target = currentInstruction[2].u.operand;
2226 
2227     emitGetVirtualRegister(src, regT0);
2228     Jump isImmediate = emitJumpIfNotJSCell(regT0);
2229 
2230     // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
2231     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2232     addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
2233     Jump wasNotImmediate = jump();
2234 
2235     // Now handle the immediate cases - undefined & null
2236     isImmediate.link(this);
2237     andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
2238     addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsNull()))), target);
2239 
2240     wasNotImmediate.link(this);
2241     RECORD_JUMP_TARGET(target);
2242 }
2243 
2244 void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
2245 {
2246     unsigned src = currentInstruction[1].u.operand;
2247     JSCell* ptr = currentInstruction[2].u.jsCell;
2248     unsigned target = currentInstruction[3].u.operand;
2249 
2250     emitGetVirtualRegister(src, regT0);
2251     addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue(ptr)))), target);
2252 
2253     RECORD_JUMP_TARGET(target);
2254 }
2255 
2256 void JIT::emit_op_jsr(Instruction* currentInstruction)
2257 {
2258     int retAddrDst = currentInstruction[1].u.operand;
2259     int target = currentInstruction[2].u.operand;
2260     DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
2261     addJump(jump(), target);
2262     m_jsrSites.append(JSRInfo(storeLocation, label()));
2263     killLastResultRegister();
2264     RECORD_JUMP_TARGET(target);
2265 }
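// op_jsr and op_sret (below) implement bytecode-level subroutines,
// historically used for 'finally' clauses: jsr plants a patchable pointer
// store whose value - the machine address of the instruction after the jump -
// is only known once this site is linked (hence m_jsrSites), and sret returns
// by jumping through that stored address. Both kill the cached result
// register because control flow merges here.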
2266 
2267 void JIT::emit_op_sret(Instruction* currentInstruction)
2268 {
2269     jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
2270     killLastResultRegister();
2271 }
2272 
2273 void JIT::emit_op_eq(Instruction* currentInstruction)
2274 {
2275     emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
2276     emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
2277     set32(Equal, regT1, regT0, regT0);
2278     emitTagAsBoolImmediate(regT0);
2279     emitPutVirtualRegister(currentInstruction[1].u.operand);
2280 }
2281 
2282 void JIT::emit_op_bitnot(Instruction* currentInstruction)
2283 {
2284     emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
2285     emitJumpSlowCaseIfNotImmediateInteger(regT0);
2286 #if USE(JSVALUE64)
2287     not32(regT0);
2288     emitFastArithIntToImmNoCheck(regT0, regT0);
2289 #else
2290     xorPtr(Imm32(~JSImmediate::TagTypeNumber), regT0);
2291 #endif
2292     emitPutVirtualRegister(currentInstruction[1].u.operand);
2293 }
2294 
2295 void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
2296 {
2297     JITStubCall stubCall(this, cti_op_resolve_with_base);
2298     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
2299     stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
2300     stubCall.call(currentInstruction[2].u.operand);
2301 }
2302 
2303 void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
2304 {
2305     JITStubCall stubCall(this, cti_op_new_func_exp);
2306     stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
2307     stubCall.call(currentInstruction[1].u.operand);
2308 }
2309 
2310 void JIT::emit_op_jtrue(Instruction* currentInstruction)
2311 {
2312     unsigned target = currentInstruction[2].u.operand;
2313     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
2314 
2315     Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0))));
2316     addJump(emitJumpIfImmediateInteger(regT0), target);
2317 
2318     addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target);
2319     addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));
2320 
2321     isZero.link(this);
2322     RECORD_JUMP_TARGET(target);
2323 }
2324 
2325 void JIT::emit_op_neq(Instruction* currentInstruction)
2326 {
2327     emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
2328     emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
2329     set32(NotEqual, regT1, regT0, regT0);
2330     emitTagAsBoolImmediate(regT0);
2331 
2332     emitPutVirtualRegister(currentInstruction[1].u.operand);
2333 
2334 }
2335 
2336 void JIT::emit_op_bitxor(Instruction* currentInstruction)
2337 {
2338     emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
2339     emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
2340     xorPtr(regT1, regT0);
2341     emitFastArithReTagImmediate(regT0, regT0);
2342     emitPutVirtualRegister(currentInstruction[1].u.operand);
2343 }
2344 
2345 void JIT::emit_op_new_regexp(Instruction* currentInstruction)
2346 {
2347     JITStubCall stubCall(this, cti_op_new_regexp);
2348     stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
2349     stubCall.call(currentInstruction[1].u.operand);
2350 }
2351 
2352 void JIT::emit_op_bitor(Instruction* currentInstruction)
2353 {
2354     emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
2355     emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
2356     orPtr(regT1, regT0);
2357     emitPutVirtualRegister(currentInstruction[1].u.operand);
2358 }
2359 
2360 void JIT::emit_op_throw(Instruction* currentInstruction)
2361 {
2362     JITStubCall stubCall(this, cti_op_throw);
2363     stubCall.addArgument(currentInstruction[1].u.operand, regT2);
2364     stubCall.call();
2365     ASSERT(regT0 == returnValueRegister);
2366 #ifndef NDEBUG
2367     // cti_op_throw always changes its return address,
2368     // so this point in the code should never be reached.
2369     breakpoint();
2370 #endif
2371 }
2372 
2373 void JIT::emit_op_get_pnames(Instruction* currentInstruction)
2374 {
2375     int dst = currentInstruction[1].u.operand;
2376     int base = currentInstruction[2].u.operand;
2377     int i = currentInstruction[3].u.operand;
2378     int size = currentInstruction[4].u.operand;
2379     int breakTarget = currentInstruction[5].u.operand;
2380 
2381     JumpList isNotObject;
2382 
2383     emitGetVirtualRegister(base, regT0);
2384     if (!m_codeBlock->isKnownNotImmediate(base))
2385         isNotObject.append(emitJumpIfNotJSCell(regT0));
2386     if (base != m_codeBlock->thisRegister()) {
2387         loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2388         isNotObject.append(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
2389     }
2390 
2391     // We could inline the case where you have a valid cache, but
2392     // this call doesn't seem to be hot.
2393     Label isObject(this);
2394     JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
2395     getPnamesStubCall.addArgument(regT0);
2396     getPnamesStubCall.call(dst);
2397     load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
2398     store32(Imm32(0), addressFor(i));
2399     store32(regT3, addressFor(size));
2400     Jump end = jump();
2401 
2402     isNotObject.link(this);
2403     move(regT0, regT1);
2404     and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT1);
2405     addJump(branch32(Equal, regT1, Imm32(JSImmediate::FullTagTypeNull)), breakTarget);
2406 
2407     JITStubCall toObjectStubCall(this, cti_to_object);
2408     toObjectStubCall.addArgument(regT0);
2409     toObjectStubCall.call(base);
2410     jump().linkTo(isObject, this);
2411 
2412     end.link(this);
2413 }
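// In outline, op_get_pnames above performs (pseudo-C++, illustrative only):
//
//     if (base is null or undefined) goto breakTarget;   // nothing to enumerate
//     if (!isObject(base)) base = cti_to_object(base);   // wrap primitives
//     it   = cti_op_get_pnames(base);                    // build/cache the name list
//     i    = 0;
//     size = it->m_jsStringsSize;
//
// which establishes the state that op_next_pname then advances.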
2414 
2415 void JIT::emit_op_next_pname(Instruction* currentInstruction)
2416 {
2417     int dst = currentInstruction[1].u.operand;
2418     int base = currentInstruction[2].u.operand;
2419     int i = currentInstruction[3].u.operand;
2420     int size = currentInstruction[4].u.operand;
2421     int it = currentInstruction[5].u.operand;
2422     int target = currentInstruction[6].u.operand;
2423 
2424     JumpList callHasProperty;
2425 
2426     Label begin(this);
2427     load32(addressFor(i), regT0);
2428     Jump end = branch32(Equal, regT0, addressFor(size));
2429 
2430     // Grab key @ i
2431     loadPtr(addressFor(it), regT1);
2432     loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
2433 
2434 #if USE(JSVALUE64)
2435     loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);
2436 #else
2437     loadPtr(BaseIndex(regT2, regT0, TimesFour), regT2);
2438 #endif
2439 
2440     emitPutVirtualRegister(dst, regT2);
2441 
2442     // Increment i
2443     add32(Imm32(1), regT0);
2444     store32(regT0, addressFor(i));
2445 
2446     // Verify that i is valid:
2447     emitGetVirtualRegister(base, regT0);
2448 
2449     // Test base's structure
2450     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2451     callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));
2452 
2453     // Test base's prototype chain
2454     loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
2455     loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
2456     addJump(branchTestPtr(Zero, Address(regT3)), target);
2457 
2458     Label checkPrototype(this);
2459     loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
2460     callHasProperty.append(emitJumpIfNotJSCell(regT2));
2461     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2462     callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
2463     addPtr(Imm32(sizeof(Structure*)), regT3);
2464     branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
2465 
2466     // Continue loop.
2467     addJump(jump(), target);
2468 
2469     // Slow case: Ask the object if i is valid.
2470     callHasProperty.link(this);
2471     emitGetVirtualRegister(dst, regT1);
2472     JITStubCall stubCall(this, cti_has_property);
2473     stubCall.addArgument(regT0);
2474     stubCall.addArgument(regT1);
2475     stubCall.call();
2476 
2477     // Test for valid key.
2478     addJump(branchTest32(NonZero, regT0), target);
2479     jump().linkTo(begin, this);
2480 
2481     // End of loop.
2482     end.link(this);
2483 }
2484 
2485 void JIT::emit_op_push_scope(Instruction* currentInstruction)
2486 {
2487     JITStubCall stubCall(this, cti_op_push_scope);
2488     stubCall.addArgument(currentInstruction[1].u.operand, regT2);
2489     stubCall.call(currentInstruction[1].u.operand);
2490 }
2491 
2492 void JIT::emit_op_pop_scope(Instruction*)
2493 {
2494     JITStubCall(this, cti_op_pop_scope).call();
2495 }
2496 
2497 void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
2498 {
2499     unsigned dst = currentInstruction[1].u.operand;
2500     unsigned src1 = currentInstruction[2].u.operand;
2501     unsigned src2 = currentInstruction[3].u.operand;
2502 
2503     emitGetVirtualRegisters(src1, regT0, src2, regT1);
2504 
2505     // Jump to a slow case if either operand is a number, or if both are JSCell*s.
2506     move(regT0, regT2);
2507     orPtr(regT1, regT2);
2508     addSlowCase(emitJumpIfJSCell(regT2));
2509     addSlowCase(emitJumpIfImmediateNumber(regT2));
2510 
2511     if (type == OpStrictEq)
2512         set32(Equal, regT1, regT0, regT0);
2513     else
2514         set32(NotEqual, regT1, regT0, regT0);
2515     emitTagAsBoolImmediate(regT0);
2516 
2517     emitPutVirtualRegister(dst);
2518 }
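// Rationale for the slow-case filter above: equal immediates are
// bit-identical, so a raw register compare decides strict equality - but two
// distinct JSString cells can still be strictly equal by content, and numbers
// need numeric comparison, so "both cells" and "either a number" each punt to
// the stub. Sketch (pseudo-C++, illustrative only):
//
//     if (bothCells(a, b) || eitherImmediateNumber(a, b)) goto slowCase;
//     frame[dst] = jsBoolean(type == OpStrictEq ? a == b : a != b);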
2519 
2520 void JIT::emit_op_stricteq(Instruction* currentInstruction)
2521 {
2522     compileOpStrictEq(currentInstruction, OpStrictEq);
2523 }
2524 
2525 void JIT::emit_op_nstricteq(Instruction* currentInstruction)
2526 {
2527     compileOpStrictEq(currentInstruction, OpNStrictEq);
2528 }
2529 
2530 void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
2531 {
2532     int srcVReg = currentInstruction[2].u.operand;
2533     emitGetVirtualRegister(srcVReg, regT0);
2534 
2535     Jump wasImmediate = emitJumpIfImmediateInteger(regT0);
2536 
2537     emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
2538     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2539     addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(NumberType)));
2540 
2541     wasImmediate.link(this);
2542 
2543     emitPutVirtualRegister(currentInstruction[1].u.operand);
2544 }
2545 
2546 void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
2547 {
2548     JITStubCall stubCall(this, cti_op_push_new_scope);
2549     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
2550     stubCall.addArgument(currentInstruction[3].u.operand, regT2);
2551     stubCall.call(currentInstruction[1].u.operand);
2552 }
2553 
2554 void JIT::emit_op_catch(Instruction* currentInstruction)
2555 {
2556     killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
    RECORD_JUMP_TARGET(currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

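    // The ctiOffsets entries are filled in at link time, one per bytecode
    // branch offset; at run time the stub maps the scrutinee to a
    // machine-code address, which we jump to indirectly below.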
    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
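    // Unlike the immediate and character tables, the string jump table is
    // hash-map based, so there is no dense offset vector to pre-size here.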

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[3].u.operand))));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);
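    // A cell compares equal to null only if its structure is flagged
    // MasqueradesAsUndefined (e.g. document.all).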

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

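    // Immediate case: clearing the extended tag bit folds undefined onto
    // null, so a single comparison against the null encoding matches both.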
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(Equal, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

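    // Same structure as emit_op_eq_null, with each condition inverted.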
    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(NotEqual, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_create_arguments(Instruction*)
{
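    // The arguments object is created lazily: skip the stub call if this
    // frame already has one. m_numParameters counts the implicit 'this'
    // argument, so a value of 1 means no declared parameters.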
    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    argsCreated.link(this);
}

void JIT::emit_op_init_arguments(Instruction*)
{
    storePtr(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

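    // The fast path requires 'this' to already be a cell whose structure
    // does not request conversion; primitives and NeedsThisConversion
    // objects are handled by the slow case.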
    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    addSlowCase(branchTest32(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
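    // enabledProfilerReference is a double indirection, so test the
    // pointed-to slot; when no profiler is active the stub call is skipped
    // entirely.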
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}


// Slow cases

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
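    // The fast path registered two slow cases: the not-a-cell check and the
    // NeedsThisConversion flag check. Link both before calling the stub.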
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
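        // The constant-operand fast path registered a single slow case (the
        // int check on the non-constant operand); the generic form below
        // links two.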
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(currentInstruction[2].u.operand, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    }
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
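    // The fast path xored out the bool tag before discovering the operand
    // was not a boolean; xor again to recover the original value before
    // handing it to the stub.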
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(Imm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

#endif // USE(JSVALUE32_64)

// For both JSValue32_64 and JSValue32
#if ENABLE(JIT_OPTIMIZE_MOD)
#if CPU(ARM_TRADITIONAL)
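// Software integer modulo, for ARM cores without a hardware divide
// instruction: takes the dividend in regT0 and the divisor in regT2, and
// returns the remainder in regT0. The two value representations tag int32s
// differently, hence the USE(JSVALUE32_64) blocks below.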
void JIT::softModulo()
{
    push(regS0);
    push(regS1);
    push(regT1);
    push(regT3);
#if USE(JSVALUE32_64)
    m_assembler.mov_r(regT3, regT2);
    m_assembler.mov_r(regT2, regT0);
#else
    m_assembler.mov_r(regT3, m_assembler.asr(regT2, 1));
    m_assembler.mov_r(regT2, m_assembler.asr(regT0, 1));
#endif
    m_assembler.mov_r(regT1, ARMAssembler::getOp2(0));

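    // Normalize both operands to non-negative values, recording the original
    // signs as flag bits in regT1 (the conditional rsb negates, the eor sets
    // the flag) so the result's sign can be fixed up on exit.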
    m_assembler.teq_r(regT3, ARMAssembler::getOp2(0));
    m_assembler.rsb_r(regT3, regT3, ARMAssembler::getOp2(0), ARMAssembler::MI);
    m_assembler.eor_r(regT1, regT1, ARMAssembler::getOp2(1), ARMAssembler::MI);

    m_assembler.teq_r(regT2, ARMAssembler::getOp2(0));
    m_assembler.rsb_r(regT2, regT2, ARMAssembler::getOp2(0), ARMAssembler::MI);
    m_assembler.eor_r(regT1, regT1, ARMAssembler::getOp2(2), ARMAssembler::MI);

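    // A dividend smaller than the divisor is already the remainder.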
    Jump exitBranch = branch32(LessThan, regT2, regT3);

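    // Power-of-two fast path: (divisor - 1) & divisor is zero only for a
    // power of two, in which case the remainder is simply
    // dividend & (divisor - 1) and we can exit early.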
    m_assembler.sub_r(regS1, regT3, ARMAssembler::getOp2(1));
    m_assembler.tst_r(regS1, regT3);
    m_assembler.and_r(regT2, regT2, regS1, ARMAssembler::EQ);
    m_assembler.and_r(regT0, regS1, regT3);
    Jump exitBranch2 = branchTest32(Zero, regT0);

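    // General case: count leading zeros to find how far the divisor must be
    // shifted left to line up with the dividend, then compute a jump into
    // the unrolled shift-and-subtract ladder at the matching rung.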
    m_assembler.clz_r(regS1, regT2);
    m_assembler.clz_r(regS0, regT3);
    m_assembler.sub_r(regS0, regS0, regS1);

    m_assembler.rsbs_r(regS0, regS0, ARMAssembler::getOp2(31));

    m_assembler.mov_r(regS0, m_assembler.lsl(regS0, 1), ARMAssembler::NE);

    m_assembler.add_r(ARMRegisters::pc, ARMRegisters::pc, m_assembler.lsl(regS0, 2), ARMAssembler::NE);
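    // This no-op appears to pad the pc-relative computed branch above (on
    // ARM, reading pc yields the current instruction's address plus 8).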
    m_assembler.mov_r(regT0, regT0);

    for (int i = 31; i > 0; --i) {
        m_assembler.cmp_r(regT2, m_assembler.lsl(regT3, i));
        m_assembler.sub_r(regT2, regT2, m_assembler.lsl(regT3, i), ARMAssembler::CS);
    }

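    // Final rung of the ladder: a shift of zero.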
    m_assembler.cmp_r(regT2, regT3);
    m_assembler.sub_r(regT2, regT2, regT3, ARMAssembler::CS);

    exitBranch.link(this);
    exitBranch2.link(this);

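    // Reapply the recorded sign information from regT1 to the remainder,
    // then re-tag the result as the active value representation requires.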
    m_assembler.teq_r(regT1, ARMAssembler::getOp2(0));
    m_assembler.rsb_r(regT2, regT2, ARMAssembler::getOp2(0), ARMAssembler::GT);

#if USE(JSVALUE32_64)
    m_assembler.mov_r(regT0, regT2);
#else
    m_assembler.mov_r(regT0, m_assembler.lsl(regT2, 1));
    m_assembler.eor_r(regT0, regT0, ARMAssembler::getOp2(1));
#endif
    pop(regT3);
    pop(regT1);
    pop(regS1);
    pop(regS0);
    ret();
}
#else
#error "JIT_OPTIMIZE_MOD not yet supported on this platform."
#endif // CPU(ARM_TRADITIONAL)
#endif // ENABLE(JIT_OPTIMIZE_MOD)
} // namespace JSC

#endif // ENABLE(JIT)