/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE64)

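// Record that the given bytecode offset is a jump target, so the JIT emits a
// linkable label when it reaches that instruction.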
#define RECORD_JUMP_TARGET(targetOffset) \
    do { m_labels[m_bytecodeOffset + (targetOffset)].used(); } while (false)

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure* trampolines)
{
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (2) The second function provides fast property access for string length
    Label stringLengthBegin = align();

    // Check that regT0 holds a JSString (a cell with the JSString vptr).
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! Get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);

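    // A length with the sign bit set cannot be represented as an int32
    // immediate, so take the slow case for it.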
    Jump string_failureCases3 = branch32(LessThan, regT0, TrustedImm32(0));

    // regT0 contains a 64-bit value (positive, zero-extended), so no sign extension is needed here.
    emitFastArithIntToImmNoCheck(regT0, regT0);

    ret();
#endif

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);

    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    JumpList callLinkFailures;
    Label virtualCallLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
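    // The lazy-link stub returns the callee's entry point in regT0, or 0 if
    // linking (compilation) failed, in which case we bail out below.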
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualConstructLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

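    // A non-negative parameter count means the FunctionExecutable has already
    // been compiled for calls; otherwise fall through and compile it now.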
    Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock4.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    // If the parser fails we want to be able to keep going,
    // so we handle this as a parse failure.
    callLinkFailures.link(this);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT1);
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
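    // The poke above rewrote our return address slot, so ret() "returns"
    // into ctiVMThrowTrampoline, which unwinds to the exception handler.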
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()), 0);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
#endif
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
#endif
}

JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86_64)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

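    // The return address is still on top of the stack at this point; copy it
    // into the call frame header so the callee's frame is fully formed.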
    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention: f(edi, esi, edx, ecx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::edi);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
    loadPtr(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_executable)), X86Registers::r9);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(X86Registers::r9, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif

    // Check for an exception
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Return.
    ret();

    // Handle an exception
    exceptionHandler.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}

JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool>, JSGlobalData* globalData, NativeFunction)
{
    return globalData->jitStubs->ctiNativeCall();
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src)) {
        storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
        if (dst == m_lastResultBytecodeRegister)
            killLastResultRegister();
    } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
        // If either the src or dst is the cached register go through
        // get/put registers to make sure we track this correctly.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        // Perform the copy via regT1; do not disturb any mapping in regT0.
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
        storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        int32_t op2imm = getConstantOperandImmediateInt(op2);
        addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThanOrEqual, regT0, regT1), target);
    }
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(baseVal, regT0);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(branch8(NotEqual, Address(regT3, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));

    // FIXME: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    loadPtr(&globalObject->m_registers, regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    loadPtr(&globalObject->m_registers, regT0);
    storePtr(regT1, Address(regT0, currentInstruction[1].u.operand * sizeof(Register)));
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand;

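    // Walk 'skip' links up the scope chain. The top-level hop is skipped when
    // this function's activation has not been created yet, since in that case
    // it was never pushed onto the scope chain.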
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);

    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT1);
    storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
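    // Call the stub if either the activation or the arguments object exists;
    // skip straight past it only when neither was created.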
    Jump activationCreated = branchTestPtr(NonZero, addressFor(activation));
    Jump argumentsNotCreated = branchTestPtr(Zero, addressFor(arguments));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.call();
    argumentsNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branchTestPtr(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(dst))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst), regT2);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in returnValueRegister (%eax on x86).
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in returnValueRegister (%eax on x86), but only if it is an object.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // The result was not an object: return 'this' instead.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
{
    // Fast case
    void* globalObject = m_codeBlock->globalObject();
    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Check Structure of global object
    move(TrustedImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset()))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT0);
    load32(offsetAddr, regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.call(dst);
}


void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

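    // Fast paths: take the jump for integer zero or boolean false; fall
    // through for any other integer or boolean true. Everything else (double,
    // cell, null, undefined) goes to the slow case.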
    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue(ptr)))), target);

    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
    killLastResultRegister();
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    killLastResultRegister();
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32Compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitnot(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitJumpSlowCaseIfNotImmediateInteger(regT0);
    not32(regT0);
    emitFastArithIntToImmNoCheck(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32Compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    storePtr(tagTypeNumberRegister, payloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

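    // Fast path: walk the iterator's cached vector of property names. The
    // cache is only trusted while the base's Structure and prototype chain
    // still match the snapshot taken when the iterator was created; otherwise
    // each key is re-validated through cti_has_property below.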
    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, regT1, src2);

    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    move(regT0, regT2);
    orPtr(regT1, regT2);
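    // OR-ing the operands lets one value answer both questions: the cell
    // check below fires only if neither operand carried tag bits (i.e. both
    // are cells), and the number check fires if either carried the number tag.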
    addSlowCase(emitJumpIfJSCell(regT2));
    addSlowCase(emitJumpIfImmediateNumber(regT2));

    if (type == OpStrictEq)
        set32Compare32(Equal, regT1, regT0, regT0);
    else
        set32Compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
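    // The throw machinery enters the handler with the handler's call frame in
    // regT0; adopt it, then fetch the pending exception from the global data
    // and clear it so it is not re-thrown.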
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, globalData) / sizeof(void*));
    loadPtr(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)), regT0);
    storePtr(TrustedImmPtr(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
    RECORD_JUMP_TARGET(currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
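    // The stub returns the native code address of the matched case (or of the
    // default target) in regT0; jump straight to it.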
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw_reference_error);
    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    set32Test8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    setPtr(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    set32Test8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    setPtr(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    emitPutVirtualRegister(dst);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
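    // A parameter count of 1 means the callee declares no formal parameters
    // beyond the implicit 'this', so the simpler no-params stub suffices.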
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    storePtr(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    addSlowCase(branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
}

void JIT::emit_op_convert_this_strict(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
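    // In strict mode 'this' is not coerced to an object: an empty (all-zero)
    // value is normalized to null, other primitives and ordinary objects pass
    // through untouched, and only cells flagged NeedsThisConversion fall to
    // the slow case.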
    Jump notNull = branchTestPtr(NonZero, regT0);
    move(TrustedImmPtr(JSValue::encode(jsNull())), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
    Jump setThis = jump();
    notNull.link(this);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump notAnObject = branch8(NotEqual, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
    addSlowCase(branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
    isImmediate.link(this);
    notAnObject.link(this);
    setThis.link(this);
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
    emitPutVirtualRegister(result);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(currentInstruction[2].u.operand, regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}
1298
emit_op_profile_did_call(Instruction * currentInstruction)1299 void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1300 {
1301 peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1302 Jump noProfiler = branchTestPtr(Zero, Address(regT1));
1303
1304 JITStubCall stubCall(this, cti_op_profile_did_call);
1305 stubCall.addArgument(currentInstruction[1].u.operand, regT1);
1306 stubCall.call();
1307 noProfiler.link(this);
1308 }
1309
1310
1311 // Slow cases
1312
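// The fast path added two slow-case entries (not a cell, NeedsThisConversion),
// so both must be linked before calling the stub.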
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_convert_this_strict(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this_strict);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
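// The fast path registers one slow case when op2 is a constant integer (only
// op1 needed a type check) and two otherwise; the linking here must match.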
void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(op2, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    }
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}
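// The fast path xor'ed the operand with ValueFalse before its type check
// failed; xor it back to recover the original value for the stub.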
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    // Inverted: jfalse reuses the jtrue stub and flips the branch sense.
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
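// Implemented in terms of cti_op_eq: flip the low bit of the result to turn
// equality into inequality before boxing it as a boolean.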
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call();
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}
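// The direct call opcodes share compileOpCallSlowCase, each consuming the next
// CallLinkInfo so the call site can be lazily linked; varargs calls use a
// separate helper.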
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
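// Fast path for arguments.length when no arguments object exists yet: read
// ArgumentCount from the call frame header and subtract one for 'this'.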
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}
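// The arguments object has been created, so fall back to a generic get_by_id
// of the property named by operand 3 (the length identifier).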
void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}
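// Fast path for arguments[i] without a materialized arguments object. The
// index is biased by one to cover 'this'; arguments that fit in the declared
// parameter slots are read in place, the rest from the out-of-line copy
// further down the register file.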
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, biased by one to include 'this'.
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    Jump skipOutOfLineParams;
    int numArgs = m_codeBlock->m_numParameters;
    if (numArgs) {
        Jump notInInPlaceArgs = branch32(AboveOrEqual, regT1, Imm32(numArgs));
        addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
        loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
        skipOutOfLineParams = jump();
        notInInPlaceArgs.link(this);
    }

    addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
    mul32(TrustedImm32(sizeof(Register)), regT2, regT2);
    subPtr(regT2, regT0);
    loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
    if (numArgs)
        skipOutOfLineParams.link(this);
    emitPutVirtualRegister(dst, regT0);
}
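// First slow case: the arguments object already exists, so jump straight to
// the get_by_val stub. The other two (non-integer index, index out of range)
// must materialize the arguments object before doing the lookup.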
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}

#endif // USE(JSVALUE64)
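// Resolves a global through 'skip' dynamic scope nodes. Each skipped node is
// checked against the activation structure; a mismatch (e.g. a 'with' scope)
// bails to the slow case. The function-level activation may not have been
// created yet, hence the extra empty-register guard.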
void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
{
    int skip = currentInstruction[5].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);

    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        // checkTopLevel is known true here, so the guard is emitted unconditionally.
        Jump activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
    }
    emit_op_resolve_global(currentInstruction, true);
}
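// One slow case per skipped scope node (its structure check failed, so do a
// full cti_op_resolve), plus a final one for a miss in the global resolve
// cache.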
void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
    int skip = currentInstruction[5].u.operand;
    while (skip--)
        linkSlowCase(iter);
    JITStubCall resolveStubCall(this, cti_op_resolve);
    resolveStubCall.addArgument(TrustedImmPtr(ident));
    resolveStubCall.call(dst);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.call(dst);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
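// Inline the varargs copy only when the caller declares no named parameters,
// which guarantees the arguments form a single contiguous stream; otherwise
// the stub performs the copy.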
void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;
    int registerOffset = currentInstruction[3].u.operand;
    ASSERT(argsOffset <= registerOffset);

    int expectedParams = m_codeBlock->m_numParameters - 1;
    // Don't do inline copying if we aren't guaranteed to have a single stream
    // of arguments.
    if (expectedParams) {
        JITStubCall stubCall(this, cti_op_load_varargs);
        stubCall.addArgument(Imm32(argsOffset));
        stubCall.call();
        // Stores a naked int32 in the register file.
        store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
        return;
    }

#if USE(JSVALUE32_64)
    addSlowCase(branch32(NotEqual, tagFor(argsOffset), TrustedImm32(JSValue::EmptyValueTag)));
#else
    addSlowCase(branchTestPtr(NonZero, addressFor(argsOffset)));
#endif
    // Load the argument count into regT0.
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
    store32(regT0, intPayloadFor(argCountDst));
    Jump endBranch = branch32(Equal, regT0, TrustedImm32(1));

    mul32(TrustedImm32(sizeof(Register)), regT0, regT3);
    addPtr(TrustedImm32(static_cast<unsigned>(sizeof(Register) - RegisterFile::CallFrameHeaderSize * sizeof(Register))), callFrameRegister, regT1);
    subPtr(regT3, regT1); // regT1 is now the start of the out-of-line arguments.
    addPtr(Imm32(argsOffset * sizeof(Register)), callFrameRegister, regT2); // regT2 is the target buffer.

    // Bounds check the register file.
    addPtr(regT2, regT3);
    addPtr(Imm32((registerOffset - argsOffset) * sizeof(Register)), regT3);
    addSlowCase(branchPtr(Below, AbsoluteAddress(m_globalData->interpreter->registerFile().addressOfEnd()), regT3));

    sub32(TrustedImm32(1), regT0);
    Label loopStart = label();
    loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(0 - 2 * sizeof(Register))), regT3);
    storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(0 - sizeof(Register))));
#if USE(JSVALUE32_64)
    loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - 2 * sizeof(Register))), regT3);
    storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - sizeof(Register))));
#endif
    branchSubPtr(NonZero, TrustedImm32(1), regT0).linkTo(loopStart, this);
    endBranch.link(this);
}
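// Reached when the arguments object has already been created or the inline
// copy would overflow the register file; the stub handles both.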
void JIT::emitSlow_op_load_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;
    int expectedParams = m_codeBlock->m_numParameters - 1;
    if (expectedParams)
        return;

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();

    store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
    store32(returnValueRegister, intPayloadFor(argCountDst));
}
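// Operand 3 marks a function declaration whose register is initialized
// lazily; skip the stub call when the register is already populated.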
void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTestPtr(NonZero, addressFor(dst));
#endif
    }
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(dst);
    if (currentInstruction[3].u.operand)
        lazyJump.link(this);
}

} // namespace JSC

#endif // ENABLE(JIT)