1 /*
2 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #include "config.h"
27 #include "JIT.h"
28
29 #if ENABLE(JIT)
30
31 #include "CodeBlock.h"
32 #include "JITInlineMethods.h"
33 #include "JSArray.h"
34 #include "JSFunction.h"
35 #include "Interpreter.h"
36 #include "ResultType.h"
37 #include "SamplingTool.h"
38
39 #ifndef NDEBUG
40 #include <stdio.h>
41 #endif
42
43 using namespace std;
44
45 namespace JSC {
46
47 #if COMPILER(GCC) && PLATFORM(X86)
48
// The x86 ctiTrampoline below hard-codes stack offsets (0x30/0x38) for the
// 'code' and 'callFrame' stub arguments; these assertions keep the asm in
// sync with the STUB_ARGS slot layout.
COMPILE_ASSERT(STUB_ARGS_code == 0x0C, STUB_ARGS_code_is_0x0C);
COMPILE_ASSERT(STUB_ARGS_callFrame == 0x0E, STUB_ARGS_callFrame_is_0x0E);

#if PLATFORM(DARWIN)
// Darwin toolchains prefix C symbol names with an underscore.
#define SYMBOL_STRING(name) "_" #name
#else
#define SYMBOL_STRING(name) #name
#endif
57
// JIT entry point (GCC/x86). Sets up a frame, preserves the callee-saved
// registers the generated code uses (esi/edi/ebx), reserves scratch/stub
// space, seeds the timeout countdown (512 in esi — presumably the
// timeoutCheckRegister consumed by emitSlowScriptCheck; confirm against
// JITInlineMethods), loads the CallFrame argument, and calls into the
// generated code pointer. The epilogue mirrors the prologue exactly.
asm(
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
SYMBOL_STRING(ctiTrampoline) ":" "\n"
    "pushl %ebp" "\n"
    "movl %esp, %ebp" "\n"
    "pushl %esi" "\n"
    "pushl %edi" "\n"
    "pushl %ebx" "\n"
    "subl $0x1c, %esp" "\n" // reserve scratch/stub-argument space; must match the addl below
    "movl $512, %esi" "\n" // initial timeout-check countdown
    "movl 0x38(%esp), %edi" "\n" // 0x38 = 0x0E * 4, 0x0E = STUB_ARGS_callFrame (see assertion above)
    "call *0x30(%esp)" "\n" // 0x30 = 0x0C * 4, 0x0C = STUB_ARGS_code (see assertion above)
    "addl $0x1c, %esp" "\n"
    "popl %ebx" "\n"
    "popl %edi" "\n"
    "popl %esi" "\n"
    "popl %ebp" "\n"
    "ret" "\n"
);
77
// Trampoline entered when a stub throws (GCC/x86). Hands the stub-argument
// pointer to Interpreter::cti_vm_throw — referenced by its mangled name since
// this is raw asm — using whichever argument-passing scheme the build selects
// (va_list, register, or on-stack), then unwinds exactly like the tail of
// ctiTrampoline above.
asm(
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
#if USE(JIT_STUB_ARGUMENT_VA_LIST)
    "call " SYMBOL_STRING(_ZN3JSC11Interpreter12cti_vm_throwEPvz) "\n" // cti_vm_throw(void*, ...)
#else
#if USE(JIT_STUB_ARGUMENT_REGISTER)
    "movl %esp, %ecx" "\n" // stub args passed via ecx
#else // JIT_STUB_ARGUMENT_STACK
    "movl %esp, 0(%esp)" "\n" // stub args passed in the first stack slot
#endif
    "call " SYMBOL_STRING(_ZN3JSC11Interpreter12cti_vm_throwEPPv) "\n" // cti_vm_throw(void**)
#endif
    // Epilogue must stay in sync with ctiTrampoline's.
    "addl $0x1c, %esp" "\n"
    "popl %ebx" "\n"
    "popl %edi" "\n"
    "popl %esi" "\n"
    "popl %ebp" "\n"
    "ret" "\n"
);
98
99 #elif COMPILER(GCC) && PLATFORM(X86_64)
100
// The x86-64 ctiTrampoline below hard-codes stack offsets (0x80/0x90) for
// the 'code' and 'callFrame' stub arguments; keep them in sync with the
// STUB_ARGS slot layout.
COMPILE_ASSERT(STUB_ARGS_code == 0x10, STUB_ARGS_code_is_0x10);
COMPILE_ASSERT(STUB_ARGS_callFrame == 0x12, STUB_ARGS_callFrame_is_0x12);

#if PLATFORM(DARWIN)
// Darwin toolchains prefix C symbol names with an underscore.
#define SYMBOL_STRING(name) "_" #name
#else
#define SYMBOL_STRING(name) #name
#endif
109
// JIT entry point (GCC/x86-64). Saves the callee-saved registers the
// generated code uses (r12-r15, rbx), reserves scratch/stub space, seeds the
// timeout countdown in r12 and two constant registers r14/r15 (presumably
// JSImmediate tag constants kept live for fast immediate checks — confirm
// against JITInlineMethods), loads the CallFrame argument, and calls into
// the generated code pointer. The epilogue mirrors the prologue exactly.
asm(
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
SYMBOL_STRING(ctiTrampoline) ":" "\n"
    "pushq %rbp" "\n"
    "movq %rsp, %rbp" "\n"
    "pushq %r12" "\n"
    "pushq %r13" "\n"
    "pushq %r14" "\n"
    "pushq %r15" "\n"
    "pushq %rbx" "\n"
    "subq $0x48, %rsp" "\n" // reserve scratch/stub-argument space; must match the addq below
    "movq $512, %r12" "\n" // initial timeout-check countdown
    "movq $0xFFFF000000000000, %r14" "\n" // constant kept live in a register (tag mask — TODO confirm)
    "movq $0xFFFF000000000002, %r15" "\n" // constant kept live in a register (tag value — TODO confirm)
    "movq 0x90(%rsp), %r13" "\n" // 0x90 = 0x12 * 8, 0x12 = STUB_ARGS_callFrame (see assertion above)
    "call *0x80(%rsp)" "\n" // 0x80 = 0x10 * 8, 0x10 = STUB_ARGS_code (see assertion above)
    "addq $0x48, %rsp" "\n"
    "popq %rbx" "\n"
    "popq %r15" "\n"
    "popq %r14" "\n"
    "popq %r13" "\n"
    "popq %r12" "\n"
    "popq %rbp" "\n"
    "ret" "\n"
);
135
// Trampoline entered when a stub throws (GCC/x86-64). Passes the
// stub-argument pointer (rsp) as the first argument to
// Interpreter::cti_vm_throw — referenced by its mangled name since this is
// raw asm — then unwinds exactly like the tail of ctiTrampoline above. Only
// the register argument-passing scheme is supported on x86-64.
asm(
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
#if USE(JIT_STUB_ARGUMENT_REGISTER)
    "movq %rsp, %rdi" "\n" // stub args passed in rdi (first integer argument)
    "call " SYMBOL_STRING(_ZN3JSC11Interpreter12cti_vm_throwEPPv) "\n" // cti_vm_throw(void**)
#else // JIT_STUB_ARGUMENT_VA_LIST or JIT_STUB_ARGUMENT_STACK
#error "JIT_STUB_ARGUMENT configuration not supported."
#endif
    // Epilogue must stay in sync with ctiTrampoline's.
    "addq $0x48, %rsp" "\n"
    "popq %rbx" "\n"
    "popq %r15" "\n"
    "popq %r14" "\n"
    "popq %r13" "\n"
    "popq %r12" "\n"
    "popq %rbp" "\n"
    "ret" "\n"
);
154
155 #elif COMPILER(MSVC)
156
extern "C" {

    // JIT entry point (MSVC/x86). Mirrors the GCC x86 ctiTrampoline above:
    // saves callee-saved esi/edi/ebx, reserves scratch/stub space, seeds the
    // timeout countdown in esi, loads the CallFrame into edi, and calls the
    // generated code whose pointer lives at [esp + 0x30].
    __declspec(naked) JSValueEncodedAsPointer* ctiTrampoline(void* code, RegisterFile*, CallFrame*, JSValuePtr* exception, Profiler**, JSGlobalData*)
    {
        __asm {
            push ebp;
            mov ebp, esp;
            push esi;
            push edi;
            push ebx;
            sub esp, 0x1c; // reserve scratch/stub-argument space; must match the add below
            mov esi, 512; // initial timeout-check countdown
            mov ecx, esp; // NOTE(review): presumably the stub-argument pointer for the register scheme (cf. ctiVMThrowTrampoline) — the GCC path has no equivalent; confirm
            mov edi, [esp + 0x38]; // 0x38 = 0x0E * 4, 0x0E = STUB_ARGS_callFrame (see assertion above)
            call [esp + 0x30]; // 0x30 = 0x0C * 4, 0x0C = STUB_ARGS_code (see assertion above)
            add esp, 0x1c;
            pop ebx;
            pop edi;
            pop esi;
            pop ebp;
            ret;
        }
    }

    // Trampoline entered when a stub throws (MSVC/x86): hands the
    // stub-argument pointer to Interpreter::cti_vm_throw in ecx, then pops
    // the frame established by ctiTrampoline.
    __declspec(naked) void ctiVMThrowTrampoline()
    {
        __asm {
#if USE(JIT_STUB_ARGUMENT_REGISTER)
            mov ecx, esp; // stub args passed via ecx
#else // JIT_STUB_ARGUMENT_VA_LIST or JIT_STUB_ARGUMENT_STACK
#error "JIT_STUB_ARGUMENT configuration not supported."
#endif
            call JSC::Interpreter::cti_vm_throw;
            // Epilogue must stay in sync with ctiTrampoline's.
            add esp, 0x1c;
            pop ebx;
            pop edi;
            pop esi;
            pop ebp;
            ret;
        }
    }

}
200
201 #endif
202
// Store 'what' into the pointer slot at 'where'. Named for its use in
// rewriting a saved return address so that the patched call site resumes
// execution at a different location.
void ctiSetReturnAddress(void** where, void* what)
{
    *where = what;
}
207
// Re-target the machine-code jump/call identified by the return address
// 'where' so that it points at 'what'; the actual instruction rewriting is
// delegated to MacroAssembler::Jump::patch.
void ctiPatchCallByReturnAddress(void* where, void* what)
{
    MacroAssembler::Jump::patch(where, what);
}
212
// Construct a JIT for the given code block. 'codeBlock' may be null (every
// use below is null-checked — presumably for compiling stand-alone stubs);
// when present it sizes the per-bytecode label table and the
// property-access / call-link compilation-info vectors.
// NOTE: initializer order must match the member declaration order in JIT.h.
JIT::JIT(JSGlobalData* globalData, CodeBlock* codeBlock)
    : m_interpreter(globalData->interpreter)
    , m_globalData(globalData)
    , m_codeBlock(codeBlock)
    , m_labels(codeBlock ? codeBlock->instructions().size() : 0)
    , m_propertyAccessCompilationInfo(codeBlock ? codeBlock->numberOfStructureStubInfos() : 0)
    , m_callStructureStubCompilationInfo(codeBlock ? codeBlock->numberOfCallLinkInfos() : 0)
    , m_lastResultBytecodeRegister(std::numeric_limits<int>::max()) // sentinel: no bytecode register cached in the result register
    , m_jumpTargetsPosition(0)
{
}
224
// Emit the fast path for op_stricteq / op_nstricteq: strictly compare the
// two source virtual registers and store a boolean into 'dst'. 'type'
// selects whether the equality result is used directly (OpStrictEq) or
// negated (OpNStrictEq). Operand pairs the fast path cannot decide are
// deferred via addSlowCase; the slow-path emitter must link one slow case
// per addSlowCase call, in order.
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, X86::eax, src2, X86::edx);

#if USE(ALTERNATE_JSIMMEDIATE)
    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    move(X86::eax, X86::ecx);
    orPtr(X86::edx, X86::ecx);
    addSlowCase(emitJumpIfJSCell(X86::ecx));
    addSlowCase(emitJumpIfImmediateNumber(X86::ecx));

    // Remaining values compare equal iff their bit patterns are equal;
    // materialize the comparison flag as a tagged boolean.
    if (type == OpStrictEq)
        sete32(X86::edx, X86::eax);
    else
        setne32(X86::edx, X86::eax);
    emitTagAsBoolImmediate(X86::eax);
#else
    bool negated = (type == OpNStrictEq);

    // Check that both are immediates, if so check if they're equal
    Jump firstNotImmediate = emitJumpIfJSCell(X86::eax);
    Jump secondNotImmediate = emitJumpIfJSCell(X86::edx);
    Jump bothWereImmediatesButNotEqual = jnePtr(X86::edx, X86::eax);

    // They are equal - set the result to true. (Or false, if negated).
    move(ImmPtr(JSValuePtr::encode(jsBoolean(!negated))), X86::eax);
    Jump bothWereImmediatesAndEqual = jump();

    // eax was not an immediate, we haven't yet checked edx.
    // If edx is also a JSCell, or is 0, then jump to a slow case,
    // otherwise these values are not equal.
    firstNotImmediate.link(this);
    emitJumpSlowCaseIfJSCell(X86::edx);
    addSlowCase(jePtr(X86::edx, ImmPtr(JSValuePtr::encode(js0()))));
    Jump firstWasNotImmediate = jump();

    // eax was an immediate, but edx wasn't.
    // If eax is 0 jump to a slow case, otherwise these values are not equal.
    secondNotImmediate.link(this);
    addSlowCase(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(js0()))));

    // We get here if the two values are different immediates, or one is 0 and the other is a JSCell.
    // Values are not equal, set the result to false.
    bothWereImmediatesButNotEqual.link(this);
    firstWasNotImmediate.link(this);
    move(ImmPtr(JSValuePtr::encode(jsBoolean(negated))), X86::eax);

    bothWereImmediatesAndEqual.link(this);
#endif

    emitPutVirtualRegister(dst);
}
281
// Emit the watchdog check placed on loop back-edges: decrement the timeout
// countdown register and, only when it reaches zero, call out to
// cti_timeout_check, which returns a fresh countdown value in eax. The CTI
// call clobbers the cached last-result state, so it is invalidated here.
void JIT::emitSlowScriptCheck()
{
    Jump skipTimeout = jnzSub32(Imm32(1), timeoutCheckRegister);
    emitCTICall(Interpreter::cti_timeout_check);
    move(X86::eax, timeoutCheckRegister);
    skipTimeout.link(this);

    killLastResultRegister();
}
291
292
// Advance m_bytecodeIndex past the current opcode and leave the dispatch
// switch; must be the last statement of every case in privateCompileMainPass.
#define NEXT_OPCODE(name) \
    m_bytecodeIndex += OPCODE_LENGTH(name); \
    break;

// Generic binary-op case: marshal the two source virtual registers as stub
// arguments 1 and 2, call the interpreter's cti_<name> stub, and store the
// returned value into the destination virtual register.
#define CTI_COMPILE_BINARY_OP(name) \
    case name: { \
        emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx); \
        emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx); \
        emitCTICall(Interpreter::cti_##name); \
        emitPutVirtualRegister(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

// Generic unary-op case: one source operand, same stub-call pattern as above.
#define CTI_COMPILE_UNARY_OP(name) \
    case name: { \
        emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx); \
        emitCTICall(Interpreter::cti_##name); \
        emitPutVirtualRegister(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }
313
privateCompileMainPass()314 void JIT::privateCompileMainPass()
315 {
316 Instruction* instructionsBegin = m_codeBlock->instructions().begin();
317 unsigned instructionCount = m_codeBlock->instructions().size();
318 unsigned propertyAccessInstructionIndex = 0;
319 unsigned globalResolveInfoIndex = 0;
320 unsigned callLinkInfoIndex = 0;
321
322 for (m_bytecodeIndex = 0; m_bytecodeIndex < instructionCount; ) {
323 Instruction* currentInstruction = instructionsBegin + m_bytecodeIndex;
324 ASSERT_WITH_MESSAGE(m_interpreter->isOpcode(currentInstruction->u.opcode), "privateCompileMainPass gone bad @ %d", m_bytecodeIndex);
325
326 #if ENABLE(OPCODE_SAMPLING)
327 if (m_bytecodeIndex > 0) // Avoid the overhead of sampling op_enter twice.
328 sampleInstruction(currentInstruction);
329 #endif
330
331 m_labels[m_bytecodeIndex] = label();
332 OpcodeID opcodeID = m_interpreter->getOpcodeID(currentInstruction->u.opcode);
333
334 switch (opcodeID) {
335 case op_mov: {
336 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
337 emitPutVirtualRegister(currentInstruction[1].u.operand);
338 NEXT_OPCODE(op_mov);
339 }
340 case op_add: {
341 compileFastArith_op_add(currentInstruction);
342 NEXT_OPCODE(op_add);
343 }
344 case op_end: {
345 if (m_codeBlock->needsFullScopeChain())
346 emitCTICall(Interpreter::cti_op_end);
347 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
348 push(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
349 ret();
350 NEXT_OPCODE(op_end);
351 }
352 case op_jmp: {
353 unsigned target = currentInstruction[1].u.operand;
354 addJump(jump(), target + 1);
355 NEXT_OPCODE(op_jmp);
356 }
357 case op_pre_inc: {
358 compileFastArith_op_pre_inc(currentInstruction[1].u.operand);
359 NEXT_OPCODE(op_pre_inc);
360 }
361 case op_loop: {
362 emitSlowScriptCheck();
363
364 unsigned target = currentInstruction[1].u.operand;
365 addJump(jump(), target + 1);
366 NEXT_OPCODE(op_end);
367 }
368 case op_loop_if_less: {
369 emitSlowScriptCheck();
370
371 unsigned op1 = currentInstruction[1].u.operand;
372 unsigned op2 = currentInstruction[2].u.operand;
373 unsigned target = currentInstruction[3].u.operand;
374 if (isOperandConstantImmediateInt(op2)) {
375 emitGetVirtualRegister(op1, X86::eax);
376 emitJumpSlowCaseIfNotImmediateInteger(X86::eax);
377 #if USE(ALTERNATE_JSIMMEDIATE)
378 int32_t op2imm = getConstantOperandImmediateInt(op2);
379 #else
380 int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
381 #endif
382 addJump(jl32(X86::eax, Imm32(op2imm)), target + 3);
383 } else {
384 emitGetVirtualRegisters(op1, X86::eax, op2, X86::edx);
385 emitJumpSlowCaseIfNotImmediateInteger(X86::eax);
386 emitJumpSlowCaseIfNotImmediateInteger(X86::edx);
387 addJump(jl32(X86::eax, X86::edx), target + 3);
388 }
389 NEXT_OPCODE(op_loop_if_less);
390 }
391 case op_loop_if_lesseq: {
392 emitSlowScriptCheck();
393
394 unsigned op1 = currentInstruction[1].u.operand;
395 unsigned op2 = currentInstruction[2].u.operand;
396 unsigned target = currentInstruction[3].u.operand;
397 if (isOperandConstantImmediateInt(op2)) {
398 emitGetVirtualRegister(op1, X86::eax);
399 emitJumpSlowCaseIfNotImmediateInteger(X86::eax);
400 #if USE(ALTERNATE_JSIMMEDIATE)
401 int32_t op2imm = getConstantOperandImmediateInt(op2);
402 #else
403 int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
404 #endif
405 addJump(jle32(X86::eax, Imm32(op2imm)), target + 3);
406 } else {
407 emitGetVirtualRegisters(op1, X86::eax, op2, X86::edx);
408 emitJumpSlowCaseIfNotImmediateInteger(X86::eax);
409 emitJumpSlowCaseIfNotImmediateInteger(X86::edx);
410 addJump(jle32(X86::eax, X86::edx), target + 3);
411 }
412 NEXT_OPCODE(op_loop_if_less);
413 }
414 case op_new_object: {
415 emitCTICall(Interpreter::cti_op_new_object);
416 emitPutVirtualRegister(currentInstruction[1].u.operand);
417 NEXT_OPCODE(op_new_object);
418 }
419 case op_put_by_id: {
420 compilePutByIdHotPath(currentInstruction[1].u.operand, &(m_codeBlock->identifier(currentInstruction[2].u.operand)), currentInstruction[3].u.operand, propertyAccessInstructionIndex++);
421 NEXT_OPCODE(op_put_by_id);
422 }
423 case op_get_by_id: {
424 compileGetByIdHotPath(currentInstruction[1].u.operand, currentInstruction[2].u.operand, &(m_codeBlock->identifier(currentInstruction[3].u.operand)), propertyAccessInstructionIndex++);
425 NEXT_OPCODE(op_get_by_id);
426 }
427 case op_instanceof: {
428 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax); // value
429 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx); // baseVal
430 emitGetVirtualRegister(currentInstruction[4].u.operand, X86::edx); // proto
431
432 // check if any are immediates
433 move(X86::eax, X86::ebx);
434 orPtr(X86::ecx, X86::ebx);
435 orPtr(X86::edx, X86::ebx);
436 emitJumpSlowCaseIfNotJSCell(X86::ebx);
437
438 // check that all are object type - this is a bit of a bithack to avoid excess branching;
439 // we check that the sum of the three type codes from Structures is exactly 3 * ObjectType,
440 // this works because NumberType and StringType are smaller
441 move(Imm32(3 * ObjectType), X86::ebx);
442 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::eax);
443 loadPtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
444 loadPtr(Address(X86::edx, FIELD_OFFSET(JSCell, m_structure)), X86::edx);
445 sub32(Address(X86::eax, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx);
446 sub32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx);
447 addSlowCase(jne32(Address(X86::edx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx));
448
449 // check that baseVal's flags include ImplementsHasInstance but not OverridesHasInstance
450 load32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), X86::ecx);
451 and32(Imm32(ImplementsHasInstance | OverridesHasInstance), X86::ecx);
452 addSlowCase(jne32(X86::ecx, Imm32(ImplementsHasInstance)));
453
454 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::ecx); // reload value
455 emitGetVirtualRegister(currentInstruction[4].u.operand, X86::edx); // reload proto
456
457 // optimistically load true result
458 move(ImmPtr(JSValuePtr::encode(jsBoolean(true))), X86::eax);
459
460 Label loop(this);
461
462 // load value's prototype
463 loadPtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
464 loadPtr(Address(X86::ecx, FIELD_OFFSET(Structure, m_prototype)), X86::ecx);
465
466 Jump exit = jePtr(X86::ecx, X86::edx);
467
468 jnePtr(X86::ecx, ImmPtr(JSValuePtr::encode(jsNull())), loop);
469
470 move(ImmPtr(JSValuePtr::encode(jsBoolean(false))), X86::eax);
471
472 exit.link(this);
473
474 emitPutVirtualRegister(currentInstruction[1].u.operand);
475
476 NEXT_OPCODE(op_instanceof);
477 }
478 case op_del_by_id: {
479 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
480 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
481 emitPutJITStubArgConstant(ident, 2);
482 emitCTICall(Interpreter::cti_op_del_by_id);
483 emitPutVirtualRegister(currentInstruction[1].u.operand);
484 NEXT_OPCODE(op_del_by_id);
485 }
486 case op_mul: {
487 compileFastArith_op_mul(currentInstruction);
488 NEXT_OPCODE(op_mul);
489 }
490 case op_new_func: {
491 FuncDeclNode* func = m_codeBlock->function(currentInstruction[2].u.operand);
492 emitPutJITStubArgConstant(func, 1);
493 emitCTICall(Interpreter::cti_op_new_func);
494 emitPutVirtualRegister(currentInstruction[1].u.operand);
495 NEXT_OPCODE(op_new_func);
496 }
497 case op_call: {
498 compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
499 NEXT_OPCODE(op_call);
500 }
501 case op_call_eval: {
502 compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
503 NEXT_OPCODE(op_call_eval);
504 }
505 case op_construct: {
506 compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
507 NEXT_OPCODE(op_construct);
508 }
509 case op_get_global_var: {
510 JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
511 move(ImmPtr(globalObject), X86::eax);
512 emitGetVariableObjectRegister(X86::eax, currentInstruction[3].u.operand, X86::eax);
513 emitPutVirtualRegister(currentInstruction[1].u.operand);
514 NEXT_OPCODE(op_get_global_var);
515 }
516 case op_put_global_var: {
517 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::edx);
518 JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
519 move(ImmPtr(globalObject), X86::eax);
520 emitPutVariableObjectRegister(X86::edx, X86::eax, currentInstruction[2].u.operand);
521 NEXT_OPCODE(op_put_global_var);
522 }
523 case op_get_scoped_var: {
524 int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();
525
526 emitGetFromCallFrameHeader(RegisterFile::ScopeChain, X86::eax);
527 while (skip--)
528 loadPtr(Address(X86::eax, FIELD_OFFSET(ScopeChainNode, next)), X86::eax);
529
530 loadPtr(Address(X86::eax, FIELD_OFFSET(ScopeChainNode, object)), X86::eax);
531 emitGetVariableObjectRegister(X86::eax, currentInstruction[2].u.operand, X86::eax);
532 emitPutVirtualRegister(currentInstruction[1].u.operand);
533 NEXT_OPCODE(op_get_scoped_var);
534 }
535 case op_put_scoped_var: {
536 int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
537
538 emitGetFromCallFrameHeader(RegisterFile::ScopeChain, X86::edx);
539 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::eax);
540 while (skip--)
541 loadPtr(Address(X86::edx, FIELD_OFFSET(ScopeChainNode, next)), X86::edx);
542
543 loadPtr(Address(X86::edx, FIELD_OFFSET(ScopeChainNode, object)), X86::edx);
544 emitPutVariableObjectRegister(X86::eax, X86::edx, currentInstruction[1].u.operand);
545 NEXT_OPCODE(op_put_scoped_var);
546 }
547 case op_tear_off_activation: {
548 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
549 emitCTICall(Interpreter::cti_op_tear_off_activation);
550 NEXT_OPCODE(op_tear_off_activation);
551 }
552 case op_tear_off_arguments: {
553 emitCTICall(Interpreter::cti_op_tear_off_arguments);
554 NEXT_OPCODE(op_tear_off_arguments);
555 }
556 case op_ret: {
557 // We could JIT generate the deref, only calling out to C when the refcount hits zero.
558 if (m_codeBlock->needsFullScopeChain())
559 emitCTICall(Interpreter::cti_op_ret_scopeChain);
560
561 // Return the result in %eax.
562 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
563
564 // Grab the return address.
565 emitGetFromCallFrameHeader(RegisterFile::ReturnPC, X86::edx);
566
567 // Restore our caller's "r".
568 emitGetFromCallFrameHeader(RegisterFile::CallerFrame, callFrameRegister);
569
570 // Return.
571 push(X86::edx);
572 ret();
573
574 NEXT_OPCODE(op_ret);
575 }
576 case op_new_array: {
577 emitPutJITStubArgConstant(currentInstruction[2].u.operand, 1);
578 emitPutJITStubArgConstant(currentInstruction[3].u.operand, 2);
579 emitCTICall(Interpreter::cti_op_new_array);
580 emitPutVirtualRegister(currentInstruction[1].u.operand);
581 NEXT_OPCODE(op_new_array);
582 }
583 case op_resolve: {
584 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
585 emitPutJITStubArgConstant(ident, 1);
586 emitCTICall(Interpreter::cti_op_resolve);
587 emitPutVirtualRegister(currentInstruction[1].u.operand);
588 NEXT_OPCODE(op_resolve);
589 }
590 case op_construct_verify: {
591 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
592
593 emitJumpSlowCaseIfNotJSCell(X86::eax);
594 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
595 addSlowCase(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo) + FIELD_OFFSET(TypeInfo, m_type)), Imm32(ObjectType)));
596
597 NEXT_OPCODE(op_construct_verify);
598 }
599 case op_get_by_val: {
600 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
601 emitJumpSlowCaseIfNotImmediateInteger(X86::edx);
602 #if USE(ALTERNATE_JSIMMEDIATE)
603 // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
604 // We check the value as if it was a uint32 against the m_fastAccessCutoff - which will always fail if
605 // number was signed since m_fastAccessCutoff is always less than intmax (since the total allocation
// size is always less than 4Gb). As such zero extending will have been correct (and extending the value
607 // to 64-bits is necessary since it's used in the address calculation. We zero extend rather than sign
608 // extending since it makes it easier to re-tag the value in the slow case.
609 zeroExtend32ToPtr(X86::edx, X86::edx);
610 #else
611 emitFastArithImmToInt(X86::edx);
612 #endif
613 emitJumpSlowCaseIfNotJSCell(X86::eax);
614 addSlowCase(jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr)));
615
616 // This is an array; get the m_storage pointer into ecx, then check if the index is below the fast cutoff
617 loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::ecx);
618 addSlowCase(jae32(X86::edx, Address(X86::eax, FIELD_OFFSET(JSArray, m_fastAccessCutoff))));
619
620 // Get the value from the vector
621 loadPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])), X86::eax);
622 emitPutVirtualRegister(currentInstruction[1].u.operand);
623 NEXT_OPCODE(op_get_by_val);
624 }
625 case op_resolve_func: {
626 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
627 emitPutJITStubArgConstant(ident, 1);
628 emitCTICall(Interpreter::cti_op_resolve_func);
629 emitPutVirtualRegister(currentInstruction[2].u.operand, X86::edx);
630 emitPutVirtualRegister(currentInstruction[1].u.operand);
631 NEXT_OPCODE(op_resolve_func);
632 }
633 case op_sub: {
634 compileFastArith_op_sub(currentInstruction);
635 NEXT_OPCODE(op_sub);
636 }
637 case op_put_by_val: {
638 emitGetVirtualRegisters(currentInstruction[1].u.operand, X86::eax, currentInstruction[2].u.operand, X86::edx);
639 emitJumpSlowCaseIfNotImmediateInteger(X86::edx);
640 #if USE(ALTERNATE_JSIMMEDIATE)
641 // See comment in op_get_by_val.
642 zeroExtend32ToPtr(X86::edx, X86::edx);
643 #else
644 emitFastArithImmToInt(X86::edx);
645 #endif
646 emitJumpSlowCaseIfNotJSCell(X86::eax);
647 addSlowCase(jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr)));
648
649 // This is an array; get the m_storage pointer into ecx, then check if the index is below the fast cutoff
650 loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::ecx);
651 Jump inFastVector = jb32(X86::edx, Address(X86::eax, FIELD_OFFSET(JSArray, m_fastAccessCutoff)));
// No; oh well, check if the access is within the vector - if so, we may still be okay.
653 addSlowCase(jae32(X86::edx, Address(X86::ecx, FIELD_OFFSET(ArrayStorage, m_vectorLength))));
654
655 // This is a write to the slow part of the vector; first, we have to check if this would be the first write to this location.
// FIXME: should be able to handle initial write to array; increment the number of items in the array, and potentially update fast access cutoff.
657 addSlowCase(jzPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0]))));
658
659 // All good - put the value into the array.
660 inFastVector.link(this);
661 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::eax);
662 storePtr(X86::eax, BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])));
663 NEXT_OPCODE(op_put_by_val);
664 }
665 CTI_COMPILE_BINARY_OP(op_lesseq)
666 case op_loop_if_true: {
667 emitSlowScriptCheck();
668
669 unsigned target = currentInstruction[2].u.operand;
670 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
671
672 Jump isZero = jePtr(X86::eax, ImmPtr(JSValuePtr::encode(js0())));
673 addJump(emitJumpIfImmediateInteger(X86::eax), target + 2);
674
675 addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(true)))), target + 2);
676 addSlowCase(jnePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(false)))));
677
678 isZero.link(this);
679 NEXT_OPCODE(op_loop_if_true);
680 };
681 case op_resolve_base: {
682 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
683 emitPutJITStubArgConstant(ident, 1);
684 emitCTICall(Interpreter::cti_op_resolve_base);
685 emitPutVirtualRegister(currentInstruction[1].u.operand);
686 NEXT_OPCODE(op_resolve_base);
687 }
688 case op_negate: {
689 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
690 emitCTICall(Interpreter::cti_op_negate);
691 emitPutVirtualRegister(currentInstruction[1].u.operand);
692 NEXT_OPCODE(op_negate);
693 }
694 case op_resolve_skip: {
695 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
696 emitPutJITStubArgConstant(ident, 1);
697 emitPutJITStubArgConstant(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain(), 2);
698 emitCTICall(Interpreter::cti_op_resolve_skip);
699 emitPutVirtualRegister(currentInstruction[1].u.operand);
700 NEXT_OPCODE(op_resolve_skip);
701 }
702 case op_resolve_global: {
703 // Fast case
704 void* globalObject = currentInstruction[2].u.jsCell;
705 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
706
707 unsigned currentIndex = globalResolveInfoIndex++;
708 void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
709 void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
710
711 // Check Structure of global object
712 move(ImmPtr(globalObject), X86::eax);
713 loadPtr(structureAddress, X86::edx);
714 Jump noMatch = jnePtr(X86::edx, Address(X86::eax, FIELD_OFFSET(JSCell, m_structure))); // Structures don't match
715
716 // Load cached property
717 loadPtr(Address(X86::eax, FIELD_OFFSET(JSGlobalObject, m_propertyStorage)), X86::eax);
718 load32(offsetAddr, X86::edx);
719 loadPtr(BaseIndex(X86::eax, X86::edx, ScalePtr), X86::eax);
720 emitPutVirtualRegister(currentInstruction[1].u.operand);
721 Jump end = jump();
722
723 // Slow case
724 noMatch.link(this);
725 emitPutJITStubArgConstant(globalObject, 1);
726 emitPutJITStubArgConstant(ident, 2);
727 emitPutJITStubArgConstant(currentIndex, 3);
728 emitCTICall(Interpreter::cti_op_resolve_global);
729 emitPutVirtualRegister(currentInstruction[1].u.operand);
730 end.link(this);
731 NEXT_OPCODE(op_resolve_global);
732 }
733 CTI_COMPILE_BINARY_OP(op_div)
734 case op_pre_dec: {
735 compileFastArith_op_pre_dec(currentInstruction[1].u.operand);
736 NEXT_OPCODE(op_pre_dec);
737 }
738 case op_jnless: {
739 unsigned op1 = currentInstruction[1].u.operand;
740 unsigned op2 = currentInstruction[2].u.operand;
741 unsigned target = currentInstruction[3].u.operand;
742 if (isOperandConstantImmediateInt(op2)) {
743 emitGetVirtualRegister(op1, X86::eax);
744 emitJumpSlowCaseIfNotImmediateInteger(X86::eax);
745 #if USE(ALTERNATE_JSIMMEDIATE)
746 int32_t op2imm = getConstantOperandImmediateInt(op2);
747 #else
748 int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
749 #endif
750 addJump(jge32(X86::eax, Imm32(op2imm)), target + 3);
751 } else {
752 emitGetVirtualRegisters(op1, X86::eax, op2, X86::edx);
753 emitJumpSlowCaseIfNotImmediateInteger(X86::eax);
754 emitJumpSlowCaseIfNotImmediateInteger(X86::edx);
755 addJump(jge32(X86::eax, X86::edx), target + 3);
756 }
757 NEXT_OPCODE(op_jnless);
758 }
759 case op_not: {
760 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
761 xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), X86::eax);
762 addSlowCase(jnzPtr(X86::eax, Imm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
763 xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), X86::eax);
764 emitPutVirtualRegister(currentInstruction[1].u.operand);
765 NEXT_OPCODE(op_not);
766 }
767 case op_jfalse: {
768 unsigned target = currentInstruction[2].u.operand;
769 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
770
771 addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(js0()))), target + 2);
772 Jump isNonZero = emitJumpIfImmediateInteger(X86::eax);
773
774 addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(false)))), target + 2);
775 addSlowCase(jnePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(true)))));
776
777 isNonZero.link(this);
778 NEXT_OPCODE(op_jfalse);
779 };
780 case op_jeq_null: {
781 unsigned src = currentInstruction[1].u.operand;
782 unsigned target = currentInstruction[2].u.operand;
783
784 emitGetVirtualRegister(src, X86::eax);
785 Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
786
787 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
788 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
789 addJump(jnz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
790 Jump wasNotImmediate = jump();
791
792 // Now handle the immediate cases - undefined & null
793 isImmediate.link(this);
794 andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
795 addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsNull()))), target + 2);
796
797 wasNotImmediate.link(this);
798 NEXT_OPCODE(op_jeq_null);
799 };
800 case op_jneq_null: {
801 unsigned src = currentInstruction[1].u.operand;
802 unsigned target = currentInstruction[2].u.operand;
803
804 emitGetVirtualRegister(src, X86::eax);
805 Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
806
807 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
808 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
809 addJump(jz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
810 Jump wasNotImmediate = jump();
811
812 // Now handle the immediate cases - undefined & null
813 isImmediate.link(this);
814 andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
815 addJump(jnePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsNull()))), target + 2);
816
817 wasNotImmediate.link(this);
818 NEXT_OPCODE(op_jneq_null);
819 }
820 case op_post_inc: {
821 compileFastArith_op_post_inc(currentInstruction[1].u.operand, currentInstruction[2].u.operand);
822 NEXT_OPCODE(op_post_inc);
823 }
824 case op_unexpected_load: {
825 JSValuePtr v = m_codeBlock->unexpectedConstant(currentInstruction[2].u.operand);
826 move(ImmPtr(JSValuePtr::encode(v)), X86::eax);
827 emitPutVirtualRegister(currentInstruction[1].u.operand);
828 NEXT_OPCODE(op_unexpected_load);
829 }
830 case op_jsr: {
831 int retAddrDst = currentInstruction[1].u.operand;
832 int target = currentInstruction[2].u.operand;
833 DataLabelPtr storeLocation = storePtrWithPatch(Address(callFrameRegister, sizeof(Register) * retAddrDst));
834 addJump(jump(), target + 2);
835 m_jsrSites.append(JSRInfo(storeLocation, label()));
836 NEXT_OPCODE(op_jsr);
837 }
838 case op_sret: {
839 jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
840 NEXT_OPCODE(op_sret);
841 }
842 case op_eq: {
843 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
844 emitJumpSlowCaseIfNotImmediateIntegers(X86::eax, X86::edx, X86::ecx);
845 sete32(X86::edx, X86::eax);
846 emitTagAsBoolImmediate(X86::eax);
847 emitPutVirtualRegister(currentInstruction[1].u.operand);
848 NEXT_OPCODE(op_eq);
849 }
850 case op_lshift: {
851 compileFastArith_op_lshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
852 NEXT_OPCODE(op_lshift);
853 }
854 case op_bitand: {
855 compileFastArith_op_bitand(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
856 NEXT_OPCODE(op_bitand);
857 }
858 case op_rshift: {
859 compileFastArith_op_rshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
860 NEXT_OPCODE(op_rshift);
861 }
862 case op_bitnot: {
863 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
864 emitJumpSlowCaseIfNotImmediateInteger(X86::eax);
865 #if USE(ALTERNATE_JSIMMEDIATE)
866 not32(X86::eax);
867 emitFastArithIntToImmNoCheck(X86::eax, X86::eax);
868 #else
869 xorPtr(Imm32(~JSImmediate::TagTypeNumber), X86::eax);
870 #endif
871 emitPutVirtualRegister(currentInstruction[1].u.operand);
872 NEXT_OPCODE(op_bitnot);
873 }
874 case op_resolve_with_base: {
875 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
876 emitPutJITStubArgConstant(ident, 1);
877 emitCTICall(Interpreter::cti_op_resolve_with_base);
878 emitPutVirtualRegister(currentInstruction[2].u.operand, X86::edx);
879 emitPutVirtualRegister(currentInstruction[1].u.operand);
880 NEXT_OPCODE(op_resolve_with_base);
881 }
882 case op_new_func_exp: {
883 FuncExprNode* func = m_codeBlock->functionExpression(currentInstruction[2].u.operand);
884 emitPutJITStubArgConstant(func, 1);
885 emitCTICall(Interpreter::cti_op_new_func_exp);
886 emitPutVirtualRegister(currentInstruction[1].u.operand);
887 NEXT_OPCODE(op_new_func_exp);
888 }
889 case op_mod: {
890 compileFastArith_op_mod(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
891 NEXT_OPCODE(op_mod);
892 }
893 case op_jtrue: {
894 unsigned target = currentInstruction[2].u.operand;
895 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
896
897 Jump isZero = jePtr(X86::eax, ImmPtr(JSValuePtr::encode(js0())));
898 addJump(emitJumpIfImmediateInteger(X86::eax), target + 2);
899
900 addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(true)))), target + 2);
901 addSlowCase(jnePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(false)))));
902
903 isZero.link(this);
904 NEXT_OPCODE(op_jtrue);
905 }
906 CTI_COMPILE_BINARY_OP(op_less)
907 case op_neq: {
908 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
909 emitJumpSlowCaseIfNotImmediateIntegers(X86::eax, X86::edx, X86::ecx);
910 setne32(X86::edx, X86::eax);
911 emitTagAsBoolImmediate(X86::eax);
912
913 emitPutVirtualRegister(currentInstruction[1].u.operand);
914
915 NEXT_OPCODE(op_neq);
916 }
917 case op_post_dec: {
918 compileFastArith_op_post_dec(currentInstruction[1].u.operand, currentInstruction[2].u.operand);
919 NEXT_OPCODE(op_post_dec);
920 }
921 CTI_COMPILE_BINARY_OP(op_urshift)
922 case op_bitxor: {
923 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
924 emitJumpSlowCaseIfNotImmediateIntegers(X86::eax, X86::edx, X86::ecx);
925 xorPtr(X86::edx, X86::eax);
926 emitFastArithReTagImmediate(X86::eax, X86::eax);
927 emitPutVirtualRegister(currentInstruction[1].u.operand);
928 NEXT_OPCODE(op_bitxor);
929 }
930 case op_new_regexp: {
931 RegExp* regExp = m_codeBlock->regexp(currentInstruction[2].u.operand);
932 emitPutJITStubArgConstant(regExp, 1);
933 emitCTICall(Interpreter::cti_op_new_regexp);
934 emitPutVirtualRegister(currentInstruction[1].u.operand);
935 NEXT_OPCODE(op_new_regexp);
936 }
937 case op_bitor: {
938 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
939 emitJumpSlowCaseIfNotImmediateIntegers(X86::eax, X86::edx, X86::ecx);
940 orPtr(X86::edx, X86::eax);
941 emitPutVirtualRegister(currentInstruction[1].u.operand);
942 NEXT_OPCODE(op_bitor);
943 }
944 case op_throw: {
945 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
946 emitCTICall(Interpreter::cti_op_throw);
947 #if PLATFORM(X86_64)
948 addPtr(Imm32(0x48), X86::esp);
949 pop(X86::ebx);
950 pop(X86::r15);
951 pop(X86::r14);
952 pop(X86::r13);
953 pop(X86::r12);
954 pop(X86::ebp);
955 ret();
956 #else
957 addPtr(Imm32(0x1c), X86::esp);
958 pop(X86::ebx);
959 pop(X86::edi);
960 pop(X86::esi);
961 pop(X86::ebp);
962 ret();
963 #endif
964 NEXT_OPCODE(op_throw);
965 }
966 case op_get_pnames: {
967 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
968 emitCTICall(Interpreter::cti_op_get_pnames);
969 emitPutVirtualRegister(currentInstruction[1].u.operand);
970 NEXT_OPCODE(op_get_pnames);
971 }
972 case op_next_pname: {
973 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
974 unsigned target = currentInstruction[3].u.operand;
975 emitCTICall(Interpreter::cti_op_next_pname);
976 Jump endOfIter = jzPtr(X86::eax);
977 emitPutVirtualRegister(currentInstruction[1].u.operand);
978 addJump(jump(), target + 3);
979 endOfIter.link(this);
980 NEXT_OPCODE(op_next_pname);
981 }
982 case op_push_scope: {
983 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
984 emitCTICall(Interpreter::cti_op_push_scope);
985 emitPutVirtualRegister(currentInstruction[1].u.operand);
986 NEXT_OPCODE(op_push_scope);
987 }
988 case op_pop_scope: {
989 emitCTICall(Interpreter::cti_op_pop_scope);
990 NEXT_OPCODE(op_pop_scope);
991 }
992 CTI_COMPILE_UNARY_OP(op_typeof)
993 CTI_COMPILE_UNARY_OP(op_is_undefined)
994 CTI_COMPILE_UNARY_OP(op_is_boolean)
995 CTI_COMPILE_UNARY_OP(op_is_number)
996 CTI_COMPILE_UNARY_OP(op_is_string)
997 CTI_COMPILE_UNARY_OP(op_is_object)
998 CTI_COMPILE_UNARY_OP(op_is_function)
999 case op_stricteq: {
1000 compileOpStrictEq(currentInstruction, OpStrictEq);
1001 NEXT_OPCODE(op_stricteq);
1002 }
1003 case op_nstricteq: {
1004 compileOpStrictEq(currentInstruction, OpNStrictEq);
1005 NEXT_OPCODE(op_nstricteq);
1006 }
1007 case op_to_jsnumber: {
1008 int srcVReg = currentInstruction[2].u.operand;
1009 emitGetVirtualRegister(srcVReg, X86::eax);
1010
1011 Jump wasImmediate = emitJumpIfImmediateInteger(X86::eax);
1012
1013 emitJumpSlowCaseIfNotJSCell(X86::eax, srcVReg);
1014 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
1015 addSlowCase(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), Imm32(NumberType)));
1016
1017 wasImmediate.link(this);
1018
1019 emitPutVirtualRegister(currentInstruction[1].u.operand);
1020 NEXT_OPCODE(op_to_jsnumber);
1021 }
1022 CTI_COMPILE_BINARY_OP(op_in)
1023 case op_push_new_scope: {
1024 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
1025 emitPutJITStubArgConstant(ident, 1);
1026 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
1027 emitCTICall(Interpreter::cti_op_push_new_scope);
1028 emitPutVirtualRegister(currentInstruction[1].u.operand);
1029 NEXT_OPCODE(op_push_new_scope);
1030 }
1031 case op_catch: {
1032 emitGetCTIParam(STUB_ARGS_callFrame, callFrameRegister);
1033 emitPutVirtualRegister(currentInstruction[1].u.operand);
1034 NEXT_OPCODE(op_catch);
1035 }
1036 case op_jmp_scopes: {
1037 unsigned count = currentInstruction[1].u.operand;
1038 emitPutJITStubArgConstant(count, 1);
1039 emitCTICall(Interpreter::cti_op_jmp_scopes);
1040 unsigned target = currentInstruction[2].u.operand;
1041 addJump(jump(), target + 2);
1042 NEXT_OPCODE(op_jmp_scopes);
1043 }
1044 case op_put_by_index: {
1045 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1046 emitPutJITStubArgConstant(currentInstruction[2].u.operand, 2);
1047 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1048 emitCTICall(Interpreter::cti_op_put_by_index);
1049 NEXT_OPCODE(op_put_by_index);
1050 }
1051 case op_switch_imm: {
1052 unsigned tableIndex = currentInstruction[1].u.operand;
1053 unsigned defaultOffset = currentInstruction[2].u.operand;
1054 unsigned scrutinee = currentInstruction[3].u.operand;
1055
1056 // create jump table for switch destinations, track this switch statement.
1057 SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
1058 m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
1059 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1060
1061 emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1062 emitPutJITStubArgConstant(tableIndex, 2);
1063 emitCTICall(Interpreter::cti_op_switch_imm);
1064 jump(X86::eax);
1065 NEXT_OPCODE(op_switch_imm);
1066 }
1067 case op_switch_char: {
1068 unsigned tableIndex = currentInstruction[1].u.operand;
1069 unsigned defaultOffset = currentInstruction[2].u.operand;
1070 unsigned scrutinee = currentInstruction[3].u.operand;
1071
1072 // create jump table for switch destinations, track this switch statement.
1073 SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
1074 m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
1075 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1076
1077 emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1078 emitPutJITStubArgConstant(tableIndex, 2);
1079 emitCTICall(Interpreter::cti_op_switch_char);
1080 jump(X86::eax);
1081 NEXT_OPCODE(op_switch_char);
1082 }
1083 case op_switch_string: {
1084 unsigned tableIndex = currentInstruction[1].u.operand;
1085 unsigned defaultOffset = currentInstruction[2].u.operand;
1086 unsigned scrutinee = currentInstruction[3].u.operand;
1087
1088 // create jump table for switch destinations, track this switch statement.
1089 StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
1090 m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
1091
1092 emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1093 emitPutJITStubArgConstant(tableIndex, 2);
1094 emitCTICall(Interpreter::cti_op_switch_string);
1095 jump(X86::eax);
1096 NEXT_OPCODE(op_switch_string);
1097 }
1098 case op_del_by_val: {
1099 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
1100 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
1101 emitCTICall(Interpreter::cti_op_del_by_val);
1102 emitPutVirtualRegister(currentInstruction[1].u.operand);
1103 NEXT_OPCODE(op_del_by_val);
1104 }
1105 case op_put_getter: {
1106 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1107 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
1108 emitPutJITStubArgConstant(ident, 2);
1109 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1110 emitCTICall(Interpreter::cti_op_put_getter);
1111 NEXT_OPCODE(op_put_getter);
1112 }
1113 case op_put_setter: {
1114 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1115 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
1116 emitPutJITStubArgConstant(ident, 2);
1117 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1118 emitCTICall(Interpreter::cti_op_put_setter);
1119 NEXT_OPCODE(op_put_setter);
1120 }
1121 case op_new_error: {
1122 JSValuePtr message = m_codeBlock->unexpectedConstant(currentInstruction[3].u.operand);
1123 emitPutJITStubArgConstant(currentInstruction[2].u.operand, 1);
1124 emitPutJITStubArgConstant(JSValuePtr::encode(message), 2);
1125 emitPutJITStubArgConstant(m_bytecodeIndex, 3);
1126 emitCTICall(Interpreter::cti_op_new_error);
1127 emitPutVirtualRegister(currentInstruction[1].u.operand);
1128 NEXT_OPCODE(op_new_error);
1129 }
1130 case op_debug: {
1131 emitPutJITStubArgConstant(currentInstruction[1].u.operand, 1);
1132 emitPutJITStubArgConstant(currentInstruction[2].u.operand, 2);
1133 emitPutJITStubArgConstant(currentInstruction[3].u.operand, 3);
1134 emitCTICall(Interpreter::cti_op_debug);
1135 NEXT_OPCODE(op_debug);
1136 }
1137 case op_eq_null: {
1138 unsigned dst = currentInstruction[1].u.operand;
1139 unsigned src1 = currentInstruction[2].u.operand;
1140
1141 emitGetVirtualRegister(src1, X86::eax);
1142 Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
1143
1144 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
1145 setnz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), X86::eax);
1146
1147 Jump wasNotImmediate = jump();
1148
1149 isImmediate.link(this);
1150
1151 andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
1152 sete32(Imm32(JSImmediate::FullTagTypeNull), X86::eax);
1153
1154 wasNotImmediate.link(this);
1155
1156 emitTagAsBoolImmediate(X86::eax);
1157 emitPutVirtualRegister(dst);
1158
1159 NEXT_OPCODE(op_eq_null);
1160 }
1161 case op_neq_null: {
1162 unsigned dst = currentInstruction[1].u.operand;
1163 unsigned src1 = currentInstruction[2].u.operand;
1164
1165 emitGetVirtualRegister(src1, X86::eax);
1166 Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
1167
1168 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
1169 setz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), X86::eax);
1170
1171 Jump wasNotImmediate = jump();
1172
1173 isImmediate.link(this);
1174
1175 andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
1176 setne32(Imm32(JSImmediate::FullTagTypeNull), X86::eax);
1177
1178 wasNotImmediate.link(this);
1179
1180 emitTagAsBoolImmediate(X86::eax);
1181 emitPutVirtualRegister(dst);
1182
1183 NEXT_OPCODE(op_neq_null);
1184 }
1185 case op_enter: {
1186 // Even though CTI doesn't use them, we initialize our constant
1187 // registers to zap stale pointers, to avoid unnecessarily prolonging
1188 // object lifetime and increasing GC pressure.
1189 size_t count = m_codeBlock->m_numVars + m_codeBlock->numberOfConstantRegisters();
1190 for (size_t j = 0; j < count; ++j)
1191 emitInitRegister(j);
1192
1193 NEXT_OPCODE(op_enter);
1194 }
1195 case op_enter_with_activation: {
1196 // Even though CTI doesn't use them, we initialize our constant
1197 // registers to zap stale pointers, to avoid unnecessarily prolonging
1198 // object lifetime and increasing GC pressure.
1199 size_t count = m_codeBlock->m_numVars + m_codeBlock->numberOfConstantRegisters();
1200 for (size_t j = 0; j < count; ++j)
1201 emitInitRegister(j);
1202
1203 emitCTICall(Interpreter::cti_op_push_activation);
1204 emitPutVirtualRegister(currentInstruction[1].u.operand);
1205
1206 NEXT_OPCODE(op_enter_with_activation);
1207 }
1208 case op_create_arguments: {
1209 if (m_codeBlock->m_numParameters == 1)
1210 emitCTICall(Interpreter::cti_op_create_arguments_no_params);
1211 else
1212 emitCTICall(Interpreter::cti_op_create_arguments);
1213 NEXT_OPCODE(op_create_arguments);
1214 }
1215 case op_convert_this: {
1216 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
1217
1218 emitJumpSlowCaseIfNotJSCell(X86::eax);
1219 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::edx);
1220 addSlowCase(jnz32(Address(X86::edx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
1221
1222 NEXT_OPCODE(op_convert_this);
1223 }
1224 case op_profile_will_call: {
1225 emitGetCTIParam(STUB_ARGS_profilerReference, X86::eax);
1226 Jump noProfiler = jzPtr(Address(X86::eax));
1227 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::eax);
1228 emitCTICall(Interpreter::cti_op_profile_will_call);
1229 noProfiler.link(this);
1230
1231 NEXT_OPCODE(op_profile_will_call);
1232 }
1233 case op_profile_did_call: {
1234 emitGetCTIParam(STUB_ARGS_profilerReference, X86::eax);
1235 Jump noProfiler = jzPtr(Address(X86::eax));
1236 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::eax);
1237 emitCTICall(Interpreter::cti_op_profile_did_call);
1238 noProfiler.link(this);
1239
1240 NEXT_OPCODE(op_profile_did_call);
1241 }
1242 case op_get_array_length:
1243 case op_get_by_id_chain:
1244 case op_get_by_id_generic:
1245 case op_get_by_id_proto:
1246 case op_get_by_id_proto_list:
1247 case op_get_by_id_self:
1248 case op_get_by_id_self_list:
1249 case op_get_string_length:
1250 case op_put_by_id_generic:
1251 case op_put_by_id_replace:
1252 case op_put_by_id_transition:
1253 ASSERT_NOT_REACHED();
1254 }
1255 }
1256
1257 ASSERT(propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
1258 ASSERT(callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());
1259
1260 #ifndef NDEBUG
1261 // reset this, in order to guard it's use with asserts
1262 m_bytecodeIndex = (unsigned)-1;
1263 #endif
1264 }
1265
1266
privateCompileLinkPass()1267 void JIT::privateCompileLinkPass()
1268 {
1269 unsigned jmpTableCount = m_jmpTable.size();
1270 for (unsigned i = 0; i < jmpTableCount; ++i)
1271 m_jmpTable[i].from.linkTo(m_labels[m_jmpTable[i].toBytecodeIndex], this);
1272 m_jmpTable.clear();
1273 }
1274
// Second code-generation pass: emit the out-of-line slow paths for every
// slow-case jump registered during the main (fast-path) pass. Entries in
// m_slowCases are walked in order; each opcode handler links the jump(s)
// recorded for its bytecode index (via linkSlowCase/getSlowCase), typically
// calls the interpreter's C++ stub (cti_*), and control rejoins the fast
// path through emitJumpSlowToHot().
void JIT::privateCompileSlowCases()
{
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();
    // These counters must advance in lockstep with the main pass so slow
    // cases pair up with the structure-stub / call-link info recorded there
    // (checked by the asserts at the end of this function).
    unsigned propertyAccessInstructionIndex = 0;
    unsigned callLinkInfoIndex = 0;

    for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
        // FIXME: enable peephole optimizations for slow cases when applicable
        killLastResultRegister();

        m_bytecodeIndex = iter->to;
#ifndef NDEBUG
        // Remember which bytecode index this group of slow cases belongs to,
        // so the asserts after the switch can verify each handler consumed
        // exactly the jumps registered for its instruction.
        unsigned firstTo = m_bytecodeIndex;
#endif
        Instruction* currentInstruction = instructionsBegin + m_bytecodeIndex;

        switch (OpcodeID opcodeID = m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
        case op_convert_this: {
            linkSlowCase(iter);
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_convert_this);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_convert_this);
        }
        case op_add: {
            compileFastArithSlow_op_add(currentInstruction, iter);
            NEXT_OPCODE(op_add);
        }
        case op_construct_verify: {
            // No stub call needed: on the slow path the construct result is
            // simply replaced with the value read from operand 2.
            linkSlowCase(iter);
            linkSlowCase(iter);
            emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
            emitPutVirtualRegister(currentInstruction[1].u.operand);

            NEXT_OPCODE(op_construct_verify);
        }
        case op_get_by_val: {
            // The slow case that handles accesses to arrays (below) may jump back up to here.
            Label beginGetByValSlow(this);

            Jump notImm = getSlowCase(iter);
            linkSlowCase(iter);
            linkSlowCase(iter);
            // The subscript was a raw untagged int on the fast path; re-tag
            // it before handing it to the generic stub.
            emitFastArithIntToImmNoCheck(X86::edx, X86::edx);
            notImm.link(this);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_get_by_val);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));

            // This is slow case that handles accesses to arrays above the fast cut-off.
            // First, check if this is an access to the vector
            linkSlowCase(iter);
            jae32(X86::edx, Address(X86::ecx, FIELD_OFFSET(ArrayStorage, m_vectorLength)), beginGetByValSlow);

            // okay, missed the fast region, but it is still in the vector. Get the value.
            loadPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])), X86::ecx);
            // Check whether the value loaded is zero; if so we need to return undefined.
            jzPtr(X86::ecx, beginGetByValSlow);
            move(X86::ecx, X86::eax);
            emitPutVirtualRegister(currentInstruction[1].u.operand, X86::eax);

            NEXT_OPCODE(op_get_by_val);
        }
        case op_sub: {
            compileFastArithSlow_op_sub(currentInstruction, iter);
            NEXT_OPCODE(op_sub);
        }
        case op_rshift: {
            compileFastArithSlow_op_rshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
            NEXT_OPCODE(op_rshift);
        }
        case op_lshift: {
            compileFastArithSlow_op_lshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
            NEXT_OPCODE(op_lshift);
        }
        case op_loop_if_less: {
            unsigned op2 = currentInstruction[2].u.operand;
            unsigned target = currentInstruction[3].u.operand;
            // A constant-int second operand registers one slow-case jump on
            // the fast path; the general form registers two.
            if (isOperandConstantImmediateInt(op2)) {
                linkSlowCase(iter);
                emitPutJITStubArg(X86::eax, 1);
                emitPutJITStubArgFromVirtualRegister(op2, 2, X86::ecx);
                emitCTICall(Interpreter::cti_op_loop_if_less);
                emitJumpSlowToHot(jnz32(X86::eax), target + 3);
            } else {
                linkSlowCase(iter);
                linkSlowCase(iter);
                emitPutJITStubArg(X86::eax, 1);
                emitPutJITStubArg(X86::edx, 2);
                emitCTICall(Interpreter::cti_op_loop_if_less);
                emitJumpSlowToHot(jnz32(X86::eax), target + 3);
            }
            NEXT_OPCODE(op_loop_if_less);
        }
        case op_put_by_id: {
            compilePutByIdSlowCase(currentInstruction[1].u.operand, &(m_codeBlock->identifier(currentInstruction[2].u.operand)), currentInstruction[3].u.operand, iter, propertyAccessInstructionIndex++);
            NEXT_OPCODE(op_put_by_id);
        }
        case op_get_by_id: {
            compileGetByIdSlowCase(currentInstruction[1].u.operand, currentInstruction[2].u.operand, &(m_codeBlock->identifier(currentInstruction[3].u.operand)), iter, propertyAccessInstructionIndex++);
            NEXT_OPCODE(op_get_by_id);
        }
        case op_loop_if_lesseq: {
            // Mirrors op_loop_if_less above, calling the lesseq stub.
            unsigned op2 = currentInstruction[2].u.operand;
            unsigned target = currentInstruction[3].u.operand;
            if (isOperandConstantImmediateInt(op2)) {
                linkSlowCase(iter);
                emitPutJITStubArg(X86::eax, 1);
                emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 2, X86::ecx);
                emitCTICall(Interpreter::cti_op_loop_if_lesseq);
                emitJumpSlowToHot(jnz32(X86::eax), target + 3);
            } else {
                linkSlowCase(iter);
                linkSlowCase(iter);
                emitPutJITStubArg(X86::eax, 1);
                emitPutJITStubArg(X86::edx, 2);
                emitCTICall(Interpreter::cti_op_loop_if_lesseq);
                emitJumpSlowToHot(jnz32(X86::eax), target + 3);
            }
            NEXT_OPCODE(op_loop_if_lesseq);
        }
        case op_pre_inc: {
            compileFastArithSlow_op_pre_inc(currentInstruction[1].u.operand, iter);
            NEXT_OPCODE(op_pre_inc);
        }
        case op_put_by_val: {
            // Normal slow cases - either is not an immediate imm, or is an array.
            Jump notImm = getSlowCase(iter);
            linkSlowCase(iter);
            linkSlowCase(iter);
            // Re-tag the untagged int subscript before the generic stub call.
            emitFastArithIntToImmNoCheck(X86::edx, X86::edx);
            notImm.link(this);
            emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitPutJITStubArg(X86::ecx, 3);
            emitCTICall(Interpreter::cti_op_put_by_val);
            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_put_by_val));

            // slow cases for immediate int accesses to arrays
            linkSlowCase(iter);
            linkSlowCase(iter);
            emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitPutJITStubArg(X86::ecx, 3);
            // Falls through to the slow-to-hot jump after the switch.
            emitCTICall(Interpreter::cti_op_put_by_val_array);

            NEXT_OPCODE(op_put_by_val);
        }
        case op_loop_if_true: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_jtrue);
            unsigned target = currentInstruction[2].u.operand;
            emitJumpSlowToHot(jnz32(X86::eax), target + 2);
            NEXT_OPCODE(op_loop_if_true);
        }
        case op_pre_dec: {
            compileFastArithSlow_op_pre_dec(currentInstruction[1].u.operand, iter);
            NEXT_OPCODE(op_pre_dec);
        }
        case op_jnless: {
            unsigned op2 = currentInstruction[2].u.operand;
            unsigned target = currentInstruction[3].u.operand;
            if (isOperandConstantImmediateInt(op2)) {
                linkSlowCase(iter);
                emitPutJITStubArg(X86::eax, 1);
                emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 2, X86::ecx);
                emitCTICall(Interpreter::cti_op_jless);
                // jnless: branch when the "less" stub reports false.
                emitJumpSlowToHot(jz32(X86::eax), target + 3);
            } else {
                linkSlowCase(iter);
                linkSlowCase(iter);
                emitPutJITStubArg(X86::eax, 1);
                emitPutJITStubArg(X86::edx, 2);
                emitCTICall(Interpreter::cti_op_jless);
                emitJumpSlowToHot(jz32(X86::eax), target + 3);
            }
            NEXT_OPCODE(op_jnless);
        }
        case op_not: {
            linkSlowCase(iter);
            // NOTE(review): this appears to undo the bool-tag xor applied on
            // the fast path before handing the original value to the stub —
            // confirm against the op_not fast-path emission.
            xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), X86::eax);
            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_not);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_not);
        }
        case op_jfalse: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            // Reuses the jtrue stub and branches on the opposite result.
            emitCTICall(Interpreter::cti_op_jtrue);
            unsigned target = currentInstruction[2].u.operand;
            emitJumpSlowToHot(jz32(X86::eax), target + 2); // inverted!
            NEXT_OPCODE(op_jfalse);
        }
        case op_post_inc: {
            compileFastArithSlow_op_post_inc(currentInstruction[1].u.operand, currentInstruction[2].u.operand, iter);
            NEXT_OPCODE(op_post_inc);
        }
        case op_bitnot: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_bitnot);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_bitnot);
        }
        case op_bitand: {
            compileFastArithSlow_op_bitand(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
            NEXT_OPCODE(op_bitand);
        }
        case op_jtrue: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_jtrue);
            unsigned target = currentInstruction[2].u.operand;
            emitJumpSlowToHot(jnz32(X86::eax), target + 2);
            NEXT_OPCODE(op_jtrue);
        }
        case op_post_dec: {
            compileFastArithSlow_op_post_dec(currentInstruction[1].u.operand, currentInstruction[2].u.operand, iter);
            NEXT_OPCODE(op_post_dec);
        }
        case op_bitxor: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_bitxor);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_bitxor);
        }
        case op_bitor: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_bitor);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_bitor);
        }
        case op_eq: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_eq);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_eq);
        }
        case op_neq: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_neq);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_neq);
        }
        case op_stricteq: {
            linkSlowCase(iter);
            linkSlowCase(iter);
            // The non-ALTERNATE immediate encoding registers a third
            // slow-case jump on the fast path; link it here to match.
#if !USE(ALTERNATE_JSIMMEDIATE)
            linkSlowCase(iter);
#endif
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_stricteq);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_stricteq);
        }
        case op_nstricteq: {
            linkSlowCase(iter);
            linkSlowCase(iter);
#if !USE(ALTERNATE_JSIMMEDIATE)
            linkSlowCase(iter);
#endif
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_nstricteq);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_nstricteq);
        }
        case op_instanceof: {
            linkSlowCase(iter);
            linkSlowCase(iter);
            linkSlowCase(iter);
            emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
            emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
            emitPutJITStubArgFromVirtualRegister(currentInstruction[4].u.operand, 3, X86::ecx);
            emitCTICall(Interpreter::cti_op_instanceof);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_instanceof);
        }
        case op_mod: {
            compileFastArithSlow_op_mod(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
            NEXT_OPCODE(op_mod);
        }
        case op_mul: {
            compileFastArithSlow_op_mul(currentInstruction, iter);
            NEXT_OPCODE(op_mul);
        }

        case op_call: {
            compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
            NEXT_OPCODE(op_call);
        }
        case op_call_eval: {
            compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
            NEXT_OPCODE(op_call_eval);
        }
        case op_construct: {
            compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
            NEXT_OPCODE(op_construct);
        }
        case op_to_jsnumber: {
            linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
            linkSlowCase(iter);

            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_to_jsnumber);

            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_to_jsnumber);
        }

        default:
            ASSERT_NOT_REACHED();
        }

        // Every handler must have consumed exactly the slow-case jumps
        // registered for its instruction — no fewer, no more.
        ASSERT_WITH_MESSAGE(iter == m_slowCases.end() || firstTo != iter->to,"Not enough jumps linked in slow case codegen.");
        ASSERT_WITH_MESSAGE(firstTo == (iter - 1)->to, "Too many jumps linked in slow case codegen.");

        // Rejoin the fast path at the current instruction (offset 0).
        emitJumpSlowToHot(jump(), 0);
    }

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    ASSERT(propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
#endif
    ASSERT(callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());

#ifndef NDEBUG
    // Reset to an invalid index so debug asserts can catch stale use.
    m_bytecodeIndex = (unsigned)-1;
#endif
}
1621
// Top-level compilation driver for a CodeBlock. Emits the prologue
// (return-PC spill and, for function code, a register-file overflow check),
// runs the main / link / slow-case code generation passes, then copies the
// generated code into executable memory and back-patches switch tables,
// exception handlers, recorded calls, jsr targets, and the
// property-access / call-link stub records.
void JIT::privateCompile()
{
    sampleCodeBlock(m_codeBlock);
#if ENABLE(OPCODE_SAMPLING)
    sampleInstruction(m_codeBlock->instructions().begin());
#endif

    // Stash the native return address into the call frame header.
    // Could use a pop_m, but would need to offset the following instruction if so.
    pop(X86::ecx);
    emitPutToCallFrameHeader(X86::ecx, RegisterFile::ReturnPC);

    Jump slowRegisterFileCheck;
    Label afterRegisterFileCheck;
    if (m_codeBlock->codeType() == FunctionCode) {
        // In the case of a fast linked call, we do not set this up in the caller.
        emitPutImmediateToCallFrameHeader(m_codeBlock, RegisterFile::CodeBlock);

        // Register-file overflow check: edx = callFrame + m_numCalleeRegisters;
        // if that address lies beyond RegisterFile::m_end, take the slow path below.
        emitGetCTIParam(STUB_ARGS_registerFile, X86::eax);
        addPtr(Imm32(m_codeBlock->m_numCalleeRegisters * sizeof(Register)), callFrameRegister, X86::edx);

        slowRegisterFileCheck = jg32(X86::edx, Address(X86::eax, FIELD_OFFSET(RegisterFile, m_end)));
        afterRegisterFileCheck = label();
    }

    privateCompileMainPass();
    privateCompileLinkPass();
    privateCompileSlowCases();

    if (m_codeBlock->codeType() == FunctionCode) {
        // Slow path of the register-file check: invoke the stub, then rejoin
        // the fast path immediately after the check.
        slowRegisterFileCheck.link(this);
        m_bytecodeIndex = 0; // emitCTICall will add to the map, but doesn't actually need this...
        emitCTICall(Interpreter::cti_register_file_check);
#ifndef NDEBUG
        // reset this, in order to guard its use with asserts
        m_bytecodeIndex = (unsigned)-1;
#endif
        jump(afterRegisterFileCheck);
    }

    ASSERT(m_jmpTable.isEmpty());

    // Copy the assembled code into an executable allocation.
    RefPtr<ExecutablePool> allocator = m_globalData->poolForSize(m_assembler.size());
    void* code = m_assembler.executableCopy(allocator.get());
    JITCodeRef codeRef(code, allocator);
#ifndef NDEBUG
    codeRef.codeSize = m_assembler.size();
#endif

    PatchBuffer patchBuffer(code);

    // Translate vPC offsets into addresses in JIT generated code, for switch tables.
    for (unsigned i = 0; i < m_switches.size(); ++i) {
        SwitchRecord record = m_switches[i];
        unsigned bytecodeIndex = record.bytecodeIndex;

        if (record.type != SwitchRecord::String) {
            ASSERT(record.type == SwitchRecord::Immediate || record.type == SwitchRecord::Character);
            ASSERT(record.jumpTable.simpleJumpTable->branchOffsets.size() == record.jumpTable.simpleJumpTable->ctiOffsets.size());

            // NOTE(review): branch offsets are applied relative to
            // "bytecodeIndex + 3" — presumably the switch opcode plus its
            // operands; confirm against the op_switch_* bytecode layout.
            record.jumpTable.simpleJumpTable->ctiDefault = patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);

            for (unsigned j = 0; j < record.jumpTable.simpleJumpTable->branchOffsets.size(); ++j) {
                unsigned offset = record.jumpTable.simpleJumpTable->branchOffsets[j];
                // A zero offset marks a hole in the table; route it to the default target.
                record.jumpTable.simpleJumpTable->ctiOffsets[j] = offset ? patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.simpleJumpTable->ctiDefault;
            }
        } else {
            ASSERT(record.type == SwitchRecord::String);

            record.jumpTable.stringJumpTable->ctiDefault = patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);

            StringJumpTable::StringOffsetTable::iterator end = record.jumpTable.stringJumpTable->offsetTable.end();
            for (StringJumpTable::StringOffsetTable::iterator it = record.jumpTable.stringJumpTable->offsetTable.begin(); it != end; ++it) {
                unsigned offset = it->second.branchOffset;
                // As above: zero offset means "use the default target".
                it->second.ctiOffset = offset ? patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.stringJumpTable->ctiDefault;
            }
        }
    }

    // Resolve each exception handler's target bytecode index to a native address.
    for (size_t i = 0; i < m_codeBlock->numberOfExceptionHandlers(); ++i) {
        HandlerInfo& handler = m_codeBlock->exceptionHandler(i);
        handler.nativeCode = patchBuffer.addressOf(m_labels[handler.target]);
    }

    // Link every recorded call that has a known destination.
    for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
        if (iter->to)
            patchBuffer.link(iter->from, iter->to);
    }

    if (m_codeBlock->hasExceptionInfo()) {
        // Record a (native offset, bytecode index) pair for each call site,
        // allowing a native return address to be mapped back to bytecode.
        m_codeBlock->pcVector().reserveCapacity(m_calls.size());
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter)
            m_codeBlock->pcVector().append(PC(reinterpret_cast<void**>(patchBuffer.addressOf(iter->from)) - reinterpret_cast<void**>(code), iter->bytecodeIndex));
    }

    // Link absolute addresses for jsr
    for (Vector<JSRInfo>::iterator iter = m_jsrSites.begin(); iter != m_jsrSites.end(); ++iter)
        patchBuffer.setPtr(iter->storeLocation, patchBuffer.addressOf(iter->target));

    // Record the native locations (stub-call return point, fast-path start)
    // for each property-access site; zeroed when the optimization is disabled.
    for (unsigned i = 0; i < m_codeBlock->numberOfStructureStubInfos(); ++i) {
        StructureStubInfo& info = m_codeBlock->structureStubInfo(i);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
        info.callReturnLocation = patchBuffer.addressOf(m_propertyAccessCompilationInfo[i].callReturnLocation);
        info.hotPathBegin = patchBuffer.addressOf(m_propertyAccessCompilationInfo[i].hotPathBegin);
#else
        info.callReturnLocation = 0;
        info.hotPathBegin = 0;
#endif
    }
    // Likewise record the native locations for each call-link site.
    for (unsigned i = 0; i < m_codeBlock->numberOfCallLinkInfos(); ++i) {
        CallLinkInfo& info = m_codeBlock->callLinkInfo(i);
#if ENABLE(JIT_OPTIMIZE_CALL)
        info.callReturnLocation = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].callReturnLocation);
        info.hotPathBegin = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
        info.hotPathOther = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].hotPathOther);
        info.coldPathOther = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].coldPathOther);
#else
        info.callReturnLocation = 0;
        info.hotPathBegin = 0;
        info.hotPathOther = 0;
        info.coldPathOther = 0;
#endif
    }

    // Publish the finished code on the CodeBlock.
    m_codeBlock->setJITCode(codeRef);
}
1747
// Generates the shared (per-VM) CTI trampolines and records their entry
// points on the Interpreter:
//  (1)/(2) with JIT_OPTIMIZE_PROPERTY_ACCESS: fast paths for array and
//          string 'length' access (failures fall back to the
//          cti_op_get_by_id_*_fail stubs, linked below);
//  (3)     three trampolines for the op_call / op_call_eval / op_construct
//          slow cases: pre-link, link, and plain virtual call.
void JIT::privateCompileCTIMachineTrampolines()
{
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) The first function provides fast property access for array length
    Label arrayLengthBegin = align();

    // Check eax is an array
    Jump array_failureCases1 = emitJumpIfNotJSCell(X86::eax);
    Jump array_failureCases2 = jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr));

    // Checks out okay! - get the length from the storage
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::eax);
    load32(Address(X86::eax, FIELD_OFFSET(ArrayStorage, m_length)), X86::eax);

    // Lengths too large to represent as an immediate int take the slow path.
    Jump array_failureCases3 = ja32(X86::eax, Imm32(JSImmediate::maxImmediateInt));

    // X86::eax contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
    emitFastArithIntToImmNoCheck(X86::eax, X86::eax);

    ret();

    // (2) The second function provides fast property access for string length
    Label stringLengthBegin = align();

    // Check eax is a string
    Jump string_failureCases1 = emitJumpIfNotJSCell(X86::eax);
    Jump string_failureCases2 = jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsStringVptr));

    // Checks out okay! - get the length from the Ustring.
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSString, m_value) + FIELD_OFFSET(UString, m_rep)), X86::eax);
    load32(Address(X86::eax, FIELD_OFFSET(UString::Rep, len)), X86::eax);

    // As for arrays: oversized lengths bail to the slow-case stub.
    Jump string_failureCases3 = ja32(X86::eax, Imm32(JSImmediate::maxImmediateInt));

    // X86::eax contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
    emitFastArithIntToImmNoCheck(X86::eax, X86::eax);

    ret();
#endif

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    // Register conventions visible below: ecx holds the callee JSFunction*,
    // edx holds the argument count; ebx is used to preserve the native
    // return address across stub calls (pop before, push after).

    Label virtualCallPreLinkBegin = align();

    // Load the callee CodeBlock* into eax
    loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
    loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
    // Null m_code means the body has not been compiled yet: call the
    // cti_op_call_JSFunction stub (linked below), then reload callee (ecx)
    // and argCount (edx) from the stub argument area.
    Jump hasCodeBlock1 = jnzPtr(X86::eax);
    pop(X86::ebx);
    restoreArgumentReference();
    Jump callJSFunction1 = call();
    emitGetJITStubArg(1, X86::ecx);
    emitGetJITStubArg(3, X86::edx);
    push(X86::ebx);
    hasCodeBlock1.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay1 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
    pop(X86::ebx);
    emitPutJITStubArg(X86::ebx, 2);
    emitPutJITStubArg(X86::eax, 4);
    restoreArgumentReference();
    Jump callArityCheck1 = call();
    // The arity-check stub returns the (possibly relocated) call frame in edx.
    move(X86::edx, callFrameRegister);
    emitGetJITStubArg(1, X86::ecx);
    emitGetJITStubArg(3, X86::edx);
    push(X86::ebx);
    arityCheckOkay1.link(this);

    compileOpCallInitializeCallFrame();

    // Pre-link variant: route through cti_vm_dontLazyLinkCall, then jump to
    // the code address the stub leaves in eax.
    pop(X86::ebx);
    emitPutJITStubArg(X86::ebx, 2);
    restoreArgumentReference();
    Jump callDontLazyLinkCall = call();
    push(X86::ebx);

    jump(X86::eax);

    Label virtualCallLinkBegin = align();

    // Load the callee CodeBlock* into eax
    loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
    loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
    Jump hasCodeBlock2 = jnzPtr(X86::eax);
    pop(X86::ebx);
    restoreArgumentReference();
    Jump callJSFunction2 = call();
    emitGetJITStubArg(1, X86::ecx);
    emitGetJITStubArg(3, X86::edx);
    push(X86::ebx);
    hasCodeBlock2.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
    pop(X86::ebx);
    emitPutJITStubArg(X86::ebx, 2);
    emitPutJITStubArg(X86::eax, 4);
    restoreArgumentReference();
    Jump callArityCheck2 = call();
    move(X86::edx, callFrameRegister);
    emitGetJITStubArg(1, X86::ecx);
    emitGetJITStubArg(3, X86::edx);
    push(X86::ebx);
    arityCheckOkay2.link(this);

    compileOpCallInitializeCallFrame();

    // Link variant: identical to the pre-link trampoline except it calls
    // cti_vm_lazyLinkCall, which can patch the call site for future calls.
    pop(X86::ebx);
    emitPutJITStubArg(X86::ebx, 2);
    restoreArgumentReference();
    Jump callLazyLinkCall = call();
    push(X86::ebx);

    jump(X86::eax);

    Label virtualCallBegin = align();

    // Load the callee CodeBlock* into eax
    loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
    loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
    Jump hasCodeBlock3 = jnzPtr(X86::eax);
    pop(X86::ebx);
    restoreArgumentReference();
    Jump callJSFunction3 = call();
    emitGetJITStubArg(1, X86::ecx);
    emitGetJITStubArg(3, X86::edx);
    push(X86::ebx);
    hasCodeBlock3.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
    pop(X86::ebx);
    emitPutJITStubArg(X86::ebx, 2);
    emitPutJITStubArg(X86::eax, 4);
    restoreArgumentReference();
    Jump callArityCheck3 = call();
    move(X86::edx, callFrameRegister);
    emitGetJITStubArg(1, X86::ecx);
    emitGetJITStubArg(3, X86::edx);
    push(X86::ebx);
    arityCheckOkay3.link(this);

    compileOpCallInitializeCallFrame();

    // load ctiCode from the new codeBlock.
    loadPtr(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_jitCode)), X86::eax);

    jump(X86::eax);

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    m_interpreter->m_executablePool = m_globalData->poolForSize(m_assembler.size());
    void* code = m_assembler.executableCopy(m_interpreter->m_executablePool.get());
    PatchBuffer patchBuffer(code);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // All three failure modes of each length fast path funnel into the same stub.
    patchBuffer.link(array_failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
    patchBuffer.link(array_failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
    patchBuffer.link(array_failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
    patchBuffer.link(string_failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));

    m_interpreter->m_ctiArrayLengthTrampoline = patchBuffer.addressOf(arrayLengthBegin);
    m_interpreter->m_ctiStringLengthTrampoline = patchBuffer.addressOf(stringLengthBegin);
#endif
    patchBuffer.link(callArityCheck1, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
    patchBuffer.link(callArityCheck2, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
    patchBuffer.link(callArityCheck3, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
    patchBuffer.link(callJSFunction2, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
    patchBuffer.link(callJSFunction3, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
    patchBuffer.link(callDontLazyLinkCall, reinterpret_cast<void*>(Interpreter::cti_vm_dontLazyLinkCall));
    patchBuffer.link(callLazyLinkCall, reinterpret_cast<void*>(Interpreter::cti_vm_lazyLinkCall));

    // Publish the trampoline entry points on the Interpreter.
    m_interpreter->m_ctiVirtualCallPreLink = patchBuffer.addressOf(virtualCallPreLinkBegin);
    m_interpreter->m_ctiVirtualCallLink = patchBuffer.addressOf(virtualCallLinkBegin);
    m_interpreter->m_ctiVirtualCall = patchBuffer.addressOf(virtualCallBegin);
}
1927
// Emits code to load variable-object register 'index' into dst:
// dst = variableObject->d->registers[index].
// dst doubles as the scratch register for the intermediate pointer loads,
// so variableObject itself is left untouched.
void JIT::emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst)
{
    loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject, d)), dst);
    loadPtr(Address(dst, FIELD_OFFSET(JSVariableObject::JSVariableObjectData, registers)), dst);
    loadPtr(Address(dst, index * sizeof(Register)), dst);
}
1934
// Emits code to store src into variable-object register 'index':
// variableObject->d->registers[index] = src.
// Note: clobbers the variableObject register, using it as scratch for the
// two intermediate pointer loads.
void JIT::emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index)
{
    loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject, d)), variableObject);
    loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject::JSVariableObjectData, registers)), variableObject);
    storePtr(src, Address(variableObject, index * sizeof(Register)));
}
1941
1942 } // namespace JSC
1943
1944 #endif // ENABLE(JIT)
1945