/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"

// This probably does not belong here; adding here for now as a quick Windows build fix.
#if ENABLE(ASSEMBLER) && PLATFORM(X86) && !PLATFORM(MAC)
#include "MacroAssembler.h"
JSC::MacroAssemblerX86Common::SSE2CheckState JSC::MacroAssemblerX86Common::s_sse2CheckState = NotCheckedSSE2;
#endif

#if ENABLE(JIT)

#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITInlineMethods.h"
#include "JITStubs.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relinkNearCallerToTrampoline(returnAddress, newCalleeFunction);
}

void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, newCalleeFunction);
}

void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relinkCallerToFunction(returnAddress, newCalleeFunction);
}

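// Note: m_bytecodeIndex is initialized to (unsigned)-1 as a sentinel; it is only
// given a real value inside the compile passes, so any stray use outside of them
// trips the ASSERTs that guard it further down in this file.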
JIT::JIT(JSGlobalData* globalData, CodeBlock* codeBlock)
    : m_interpreter(globalData->interpreter)
    , m_globalData(globalData)
    , m_codeBlock(codeBlock)
    , m_labels(codeBlock ? codeBlock->instructions().size() : 0)
    , m_propertyAccessCompilationInfo(codeBlock ? codeBlock->numberOfStructureStubInfos() : 0)
    , m_callStructureStubCompilationInfo(codeBlock ? codeBlock->numberOfCallLinkInfos() : 0)
    , m_bytecodeIndex((unsigned)-1)
#if USE(JSVALUE32_64)
    , m_jumpTargetIndex(0)
    , m_mappedBytecodeIndex((unsigned)-1)
    , m_mappedVirtualRegisterIndex((unsigned)-1)
    , m_mappedTag((RegisterID)-1)
    , m_mappedPayload((RegisterID)-1)
#else
    , m_lastResultBytecodeRegister(std::numeric_limits<int>::max())
    , m_jumpTargetsPosition(0)
#endif
{
}

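// emitTimeoutCheck decrements the tick counter held in timeoutCheckRegister and,
// only when it reaches zero, calls the cti_timeout_check stub so the VM can decide
// whether the script should be interrupted; the stub's return value is stored back
// into timeoutCheckRegister. In the JSVALUE32_64 build the cached last-result
// tag/payload registers (regT1/regT0) are saved across the stub call and reloaded
// afterwards; in the other build the cached last result is simply killed.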
#if USE(JSVALUE32_64)
void JIT::emitTimeoutCheck()
{
    Jump skipTimeout = branchSub32(NonZero, Imm32(1), timeoutCheckRegister);
    JITStubCall stubCall(this, cti_timeout_check);
    stubCall.addArgument(regT1, regT0); // save last result registers.
    stubCall.call(timeoutCheckRegister);
    stubCall.getArgument(0, regT1, regT0); // reload last result registers.
    skipTimeout.link(this);
}
#else
void JIT::emitTimeoutCheck()
{
    Jump skipTimeout = branchSub32(NonZero, Imm32(1), timeoutCheckRegister);
    JITStubCall(this, cti_timeout_check).call(timeoutCheckRegister);
    skipTimeout.link(this);

    killLastResultRegister();
}
#endif

#define NEXT_OPCODE(name) \
    m_bytecodeIndex += OPCODE_LENGTH(name); \
    break;

#if USE(JSVALUE32_64)
#define DEFINE_BINARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand); \
        stubCall.addArgument(currentInstruction[3].u.operand); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

#define DEFINE_UNARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

#else // USE(JSVALUE32_64)

#define DEFINE_BINARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand, regT2); \
        stubCall.addArgument(currentInstruction[3].u.operand, regT2); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

#define DEFINE_UNARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand, regT2); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }
#endif // USE(JSVALUE32_64)

#define DEFINE_OP(name) \
    case name: { \
        emit_##name(currentInstruction); \
        NEXT_OPCODE(name); \
    }

#define DEFINE_SLOWCASE_OP(name) \
    case name: { \
        emitSlow_##name(currentInstruction, iter); \
        NEXT_OPCODE(name); \
    }

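// For illustration, DEFINE_OP(op_add) expands (roughly) to:
//     case op_add: {
//         emit_op_add(currentInstruction);
//         m_bytecodeIndex += OPCODE_LENGTH(op_add);
//         break;
//     }
// so each case dispatches to the matching emit_/emitSlow_ helper and then advances
// m_bytecodeIndex past the opcode it just handled.

// First pass: walk the bytecode stream in order, record a label for every
// instruction (so the link pass can resolve jumps to it), and emit the fast-path
// code for each opcode via the DEFINE_*_OP macros above.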
void JIT::privateCompileMainPass()
{
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();
    unsigned instructionCount = m_codeBlock->instructions().size();

    m_propertyAccessInstructionIndex = 0;
    m_globalResolveInfoIndex = 0;
    m_callLinkInfoIndex = 0;

    for (m_bytecodeIndex = 0; m_bytecodeIndex < instructionCount; ) {
        Instruction* currentInstruction = instructionsBegin + m_bytecodeIndex;
        ASSERT_WITH_MESSAGE(m_interpreter->isOpcode(currentInstruction->u.opcode), "privateCompileMainPass gone bad @ %d", m_bytecodeIndex);

#if ENABLE(OPCODE_SAMPLING)
        if (m_bytecodeIndex > 0) // Avoid the overhead of sampling op_enter twice.
            sampleInstruction(currentInstruction);
#endif

#if !USE(JSVALUE32_64)
        if (m_labels[m_bytecodeIndex].isUsed())
            killLastResultRegister();
#endif

        m_labels[m_bytecodeIndex] = label();

        switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
        DEFINE_BINARY_OP(op_del_by_val)
#if !USE(JSVALUE32_64)
        DEFINE_BINARY_OP(op_div)
#endif
        DEFINE_BINARY_OP(op_in)
        DEFINE_BINARY_OP(op_less)
        DEFINE_BINARY_OP(op_lesseq)
        DEFINE_BINARY_OP(op_urshift)
        DEFINE_UNARY_OP(op_get_pnames)
        DEFINE_UNARY_OP(op_is_boolean)
        DEFINE_UNARY_OP(op_is_function)
        DEFINE_UNARY_OP(op_is_number)
        DEFINE_UNARY_OP(op_is_object)
        DEFINE_UNARY_OP(op_is_string)
        DEFINE_UNARY_OP(op_is_undefined)
#if !USE(JSVALUE32_64)
        DEFINE_UNARY_OP(op_negate)
#endif
        DEFINE_UNARY_OP(op_typeof)

        DEFINE_OP(op_add)
        DEFINE_OP(op_bitand)
        DEFINE_OP(op_bitnot)
        DEFINE_OP(op_bitor)
        DEFINE_OP(op_bitxor)
        DEFINE_OP(op_call)
        DEFINE_OP(op_call_eval)
        DEFINE_OP(op_call_varargs)
        DEFINE_OP(op_catch)
        DEFINE_OP(op_construct)
        DEFINE_OP(op_construct_verify)
        DEFINE_OP(op_convert_this)
        DEFINE_OP(op_init_arguments)
        DEFINE_OP(op_create_arguments)
        DEFINE_OP(op_debug)
        DEFINE_OP(op_del_by_id)
#if USE(JSVALUE32_64)
        DEFINE_OP(op_div)
#endif
        DEFINE_OP(op_end)
        DEFINE_OP(op_enter)
        DEFINE_OP(op_enter_with_activation)
        DEFINE_OP(op_eq)
        DEFINE_OP(op_eq_null)
        DEFINE_OP(op_get_by_id)
        DEFINE_OP(op_get_by_val)
        DEFINE_OP(op_get_global_var)
        DEFINE_OP(op_get_scoped_var)
        DEFINE_OP(op_instanceof)
        DEFINE_OP(op_jeq_null)
        DEFINE_OP(op_jfalse)
        DEFINE_OP(op_jmp)
        DEFINE_OP(op_jmp_scopes)
        DEFINE_OP(op_jneq_null)
        DEFINE_OP(op_jneq_ptr)
        DEFINE_OP(op_jnless)
        DEFINE_OP(op_jnlesseq)
        DEFINE_OP(op_jsr)
        DEFINE_OP(op_jtrue)
        DEFINE_OP(op_load_varargs)
        DEFINE_OP(op_loop)
        DEFINE_OP(op_loop_if_less)
        DEFINE_OP(op_loop_if_lesseq)
        DEFINE_OP(op_loop_if_true)
        DEFINE_OP(op_lshift)
        DEFINE_OP(op_method_check)
        DEFINE_OP(op_mod)
        DEFINE_OP(op_mov)
        DEFINE_OP(op_mul)
#if USE(JSVALUE32_64)
        DEFINE_OP(op_negate)
#endif
        DEFINE_OP(op_neq)
        DEFINE_OP(op_neq_null)
        DEFINE_OP(op_new_array)
        DEFINE_OP(op_new_error)
        DEFINE_OP(op_new_func)
        DEFINE_OP(op_new_func_exp)
        DEFINE_OP(op_new_object)
        DEFINE_OP(op_new_regexp)
        DEFINE_OP(op_next_pname)
        DEFINE_OP(op_not)
        DEFINE_OP(op_nstricteq)
        DEFINE_OP(op_pop_scope)
        DEFINE_OP(op_post_dec)
        DEFINE_OP(op_post_inc)
        DEFINE_OP(op_pre_dec)
        DEFINE_OP(op_pre_inc)
        DEFINE_OP(op_profile_did_call)
        DEFINE_OP(op_profile_will_call)
        DEFINE_OP(op_push_new_scope)
        DEFINE_OP(op_push_scope)
        DEFINE_OP(op_put_by_id)
        DEFINE_OP(op_put_by_index)
        DEFINE_OP(op_put_by_val)
        DEFINE_OP(op_put_getter)
        DEFINE_OP(op_put_global_var)
        DEFINE_OP(op_put_scoped_var)
        DEFINE_OP(op_put_setter)
        DEFINE_OP(op_resolve)
        DEFINE_OP(op_resolve_base)
        DEFINE_OP(op_resolve_global)
        DEFINE_OP(op_resolve_skip)
        DEFINE_OP(op_resolve_with_base)
        DEFINE_OP(op_ret)
        DEFINE_OP(op_rshift)
        DEFINE_OP(op_sret)
        DEFINE_OP(op_strcat)
        DEFINE_OP(op_stricteq)
        DEFINE_OP(op_sub)
        DEFINE_OP(op_switch_char)
        DEFINE_OP(op_switch_imm)
        DEFINE_OP(op_switch_string)
        DEFINE_OP(op_tear_off_activation)
        DEFINE_OP(op_tear_off_arguments)
        DEFINE_OP(op_throw)
        DEFINE_OP(op_to_jsnumber)
        DEFINE_OP(op_to_primitive)

        case op_get_array_length:
        case op_get_by_id_chain:
        case op_get_by_id_generic:
        case op_get_by_id_proto:
        case op_get_by_id_proto_list:
        case op_get_by_id_self:
        case op_get_by_id_self_list:
        case op_get_string_length:
        case op_put_by_id_generic:
        case op_put_by_id_replace:
        case op_put_by_id_transition:
            ASSERT_NOT_REACHED();
        }
    }

    ASSERT(m_propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
    ASSERT(m_callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());

#ifndef NDEBUG
    // Reset this, in order to guard its use with ASSERTs.
    m_bytecodeIndex = (unsigned)-1;
#endif
}

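// Second pass: bind every intra-function jump recorded in m_jmpTable during the
// main pass to the label of its target bytecode index.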
void JIT::privateCompileLinkPass()
{
    unsigned jmpTableCount = m_jmpTable.size();
    for (unsigned i = 0; i < jmpTableCount; ++i)
        m_jmpTable[i].from.linkTo(m_labels[m_jmpTable[i].toBytecodeIndex], this);
    m_jmpTable.clear();
}

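// Third pass: for every slow case recorded during the main pass, emit the
// out-of-line code via the emitSlow_ helpers and finish each case with a jump
// back to the fast path (emitJumpSlowToHot).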
void JIT::privateCompileSlowCases()
{
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();

    m_propertyAccessInstructionIndex = 0;
#if USE(JSVALUE32_64)
    m_globalResolveInfoIndex = 0;
#endif
    m_callLinkInfoIndex = 0;

    for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
#if !USE(JSVALUE32_64)
        killLastResultRegister();
#endif

        m_bytecodeIndex = iter->to;
#ifndef NDEBUG
        unsigned firstTo = m_bytecodeIndex;
#endif
        Instruction* currentInstruction = instructionsBegin + m_bytecodeIndex;

        switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
        DEFINE_SLOWCASE_OP(op_add)
        DEFINE_SLOWCASE_OP(op_bitand)
        DEFINE_SLOWCASE_OP(op_bitnot)
        DEFINE_SLOWCASE_OP(op_bitor)
        DEFINE_SLOWCASE_OP(op_bitxor)
        DEFINE_SLOWCASE_OP(op_call)
        DEFINE_SLOWCASE_OP(op_call_eval)
        DEFINE_SLOWCASE_OP(op_call_varargs)
        DEFINE_SLOWCASE_OP(op_construct)
        DEFINE_SLOWCASE_OP(op_construct_verify)
        DEFINE_SLOWCASE_OP(op_convert_this)
#if USE(JSVALUE32_64)
        DEFINE_SLOWCASE_OP(op_div)
#endif
        DEFINE_SLOWCASE_OP(op_eq)
        DEFINE_SLOWCASE_OP(op_get_by_id)
        DEFINE_SLOWCASE_OP(op_get_by_val)
        DEFINE_SLOWCASE_OP(op_instanceof)
        DEFINE_SLOWCASE_OP(op_jfalse)
        DEFINE_SLOWCASE_OP(op_jnless)
        DEFINE_SLOWCASE_OP(op_jnlesseq)
        DEFINE_SLOWCASE_OP(op_jtrue)
        DEFINE_SLOWCASE_OP(op_loop_if_less)
        DEFINE_SLOWCASE_OP(op_loop_if_lesseq)
        DEFINE_SLOWCASE_OP(op_loop_if_true)
        DEFINE_SLOWCASE_OP(op_lshift)
        DEFINE_SLOWCASE_OP(op_method_check)
        DEFINE_SLOWCASE_OP(op_mod)
        DEFINE_SLOWCASE_OP(op_mul)
#if USE(JSVALUE32_64)
        DEFINE_SLOWCASE_OP(op_negate)
#endif
        DEFINE_SLOWCASE_OP(op_neq)
        DEFINE_SLOWCASE_OP(op_not)
        DEFINE_SLOWCASE_OP(op_nstricteq)
        DEFINE_SLOWCASE_OP(op_post_dec)
        DEFINE_SLOWCASE_OP(op_post_inc)
        DEFINE_SLOWCASE_OP(op_pre_dec)
        DEFINE_SLOWCASE_OP(op_pre_inc)
        DEFINE_SLOWCASE_OP(op_put_by_id)
        DEFINE_SLOWCASE_OP(op_put_by_val)
#if USE(JSVALUE32_64)
        DEFINE_SLOWCASE_OP(op_resolve_global)
#endif
        DEFINE_SLOWCASE_OP(op_rshift)
        DEFINE_SLOWCASE_OP(op_stricteq)
        DEFINE_SLOWCASE_OP(op_sub)
        DEFINE_SLOWCASE_OP(op_to_jsnumber)
        DEFINE_SLOWCASE_OP(op_to_primitive)
        default:
            ASSERT_NOT_REACHED();
        }

        ASSERT_WITH_MESSAGE(iter == m_slowCases.end() || firstTo != iter->to, "Not enough jumps linked in slow case codegen.");
        ASSERT_WITH_MESSAGE(firstTo == (iter - 1)->to, "Too many jumps linked in slow case codegen.");

        emitJumpSlowToHot(jump(), 0);
    }

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    ASSERT(m_propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
#endif
    ASSERT(m_callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());

#ifndef NDEBUG
    // Reset this, in order to guard its use with ASSERTs.
    m_bytecodeIndex = (unsigned)-1;
#endif
}

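// Top-level driver: emits the prologue (spilling the return PC into the call frame
// header and, for function code, performing the register file overflow check), runs
// the three compile passes, and then uses the LinkBuffer to resolve switch tables,
// exception handlers, stub calls, jsr targets and the property-access/call link
// records before handing the finalized code to the CodeBlock.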
void JIT::privateCompile()
{
    sampleCodeBlock(m_codeBlock);
#if ENABLE(OPCODE_SAMPLING)
    sampleInstruction(m_codeBlock->instructions().begin());
#endif

    // Could use a pop_m, but would need to offset the following instruction if so.
    preserveReturnAddressAfterCall(regT2);
    emitPutToCallFrameHeader(regT2, RegisterFile::ReturnPC);

    Jump slowRegisterFileCheck;
    Label afterRegisterFileCheck;
    if (m_codeBlock->codeType() == FunctionCode) {
        // In the case of a fast linked call, we do not set this up in the caller.
        emitPutImmediateToCallFrameHeader(m_codeBlock, RegisterFile::CodeBlock);

        peek(regT0, OBJECT_OFFSETOF(JITStackFrame, registerFile) / sizeof (void*));
        addPtr(Imm32(m_codeBlock->m_numCalleeRegisters * sizeof(Register)), callFrameRegister, regT1);

        slowRegisterFileCheck = branchPtr(Above, regT1, Address(regT0, OBJECT_OFFSETOF(RegisterFile, m_end)));
        afterRegisterFileCheck = label();
    }

    privateCompileMainPass();
    privateCompileLinkPass();
    privateCompileSlowCases();

    if (m_codeBlock->codeType() == FunctionCode) {
        slowRegisterFileCheck.link(this);
        m_bytecodeIndex = 0;
        JITStubCall(this, cti_register_file_check).call();
#ifndef NDEBUG
        m_bytecodeIndex = (unsigned)-1; // Reset this, in order to guard its use with ASSERTs.
#endif
        jump(afterRegisterFileCheck);
    }

    ASSERT(m_jmpTable.isEmpty());

    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

    // Translate vPC offsets into addresses in JIT generated code, for switch tables.
    for (unsigned i = 0; i < m_switches.size(); ++i) {
        SwitchRecord record = m_switches[i];
        unsigned bytecodeIndex = record.bytecodeIndex;

        if (record.type != SwitchRecord::String) {
            ASSERT(record.type == SwitchRecord::Immediate || record.type == SwitchRecord::Character);
            ASSERT(record.jumpTable.simpleJumpTable->branchOffsets.size() == record.jumpTable.simpleJumpTable->ctiOffsets.size());

            record.jumpTable.simpleJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);

            for (unsigned j = 0; j < record.jumpTable.simpleJumpTable->branchOffsets.size(); ++j) {
                unsigned offset = record.jumpTable.simpleJumpTable->branchOffsets[j];
                record.jumpTable.simpleJumpTable->ctiOffsets[j] = offset ? patchBuffer.locationOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.simpleJumpTable->ctiDefault;
            }
        } else {
            ASSERT(record.type == SwitchRecord::String);

            record.jumpTable.stringJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);

            StringJumpTable::StringOffsetTable::iterator end = record.jumpTable.stringJumpTable->offsetTable.end();
            for (StringJumpTable::StringOffsetTable::iterator it = record.jumpTable.stringJumpTable->offsetTable.begin(); it != end; ++it) {
                unsigned offset = it->second.branchOffset;
                it->second.ctiOffset = offset ? patchBuffer.locationOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.stringJumpTable->ctiDefault;
            }
        }
    }

    for (size_t i = 0; i < m_codeBlock->numberOfExceptionHandlers(); ++i) {
        HandlerInfo& handler = m_codeBlock->exceptionHandler(i);
        handler.nativeCode = patchBuffer.locationOf(m_labels[handler.target]);
    }

    for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
        if (iter->to)
            patchBuffer.link(iter->from, FunctionPtr(iter->to));
    }

    if (m_codeBlock->hasExceptionInfo()) {
        m_codeBlock->callReturnIndexVector().reserveCapacity(m_calls.size());
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter)
            m_codeBlock->callReturnIndexVector().append(CallReturnOffsetToBytecodeIndex(patchBuffer.returnAddressOffset(iter->from), iter->bytecodeIndex));
    }

    // Link absolute addresses for jsr.
    for (Vector<JSRInfo>::iterator iter = m_jsrSites.begin(); iter != m_jsrSites.end(); ++iter)
        patchBuffer.patch(iter->storeLocation, patchBuffer.locationOf(iter->target).executableAddress());

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    for (unsigned i = 0; i < m_codeBlock->numberOfStructureStubInfos(); ++i) {
        StructureStubInfo& info = m_codeBlock->structureStubInfo(i);
        info.callReturnLocation = patchBuffer.locationOf(m_propertyAccessCompilationInfo[i].callReturnLocation);
        info.hotPathBegin = patchBuffer.locationOf(m_propertyAccessCompilationInfo[i].hotPathBegin);
    }
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    for (unsigned i = 0; i < m_codeBlock->numberOfCallLinkInfos(); ++i) {
        CallLinkInfo& info = m_codeBlock->callLinkInfo(i);
        info.ownerCodeBlock = m_codeBlock;
        info.callReturnLocation = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].callReturnLocation);
        info.hotPathBegin = patchBuffer.locationOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
        info.hotPathOther = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].hotPathOther);
    }
#endif
    unsigned methodCallCount = m_methodCallCompilationInfo.size();
    m_codeBlock->addMethodCallLinkInfos(methodCallCount);
    for (unsigned i = 0; i < methodCallCount; ++i) {
        MethodCallLinkInfo& info = m_codeBlock->methodCallLinkInfo(i);
        info.structureLabel = patchBuffer.locationOf(m_methodCallCompilationInfo[i].structureToCompare);
        info.callReturnLocation = m_codeBlock->structureStubInfo(m_methodCallCompilationInfo[i].propertyAccessIndex).callReturnLocation;
    }

    m_codeBlock->setJITCode(patchBuffer.finalizeCode());
}

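// Helpers for the non-JSVALUE32_64 build: read or write a JSVariableObject's
// register slot by chasing variableObject->d->registers and indexing it by
// 'index' Register-sized entries.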
#if !USE(JSVALUE32_64)
void JIT::emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst)
{
    loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject, d)), dst);
    loadPtr(Address(dst, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), dst);
    loadPtr(Address(dst, index * sizeof(Register)), dst);
}

void JIT::emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index)
{
    loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject, d)), variableObject);
    loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), variableObject);
    storePtr(src, Address(variableObject, index * sizeof(Register)));
}
#endif

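// Lazy call linking: the hot path of a call embeds a pointer check against the
// cached JSFunction (hotPathBegin) and a near call into the callee's generated
// code (hotPathOther). linkCall() patches both once the callee is known (it is
// reached via the call-linking stubs), and also relinks the slow-path call so
// linking is not retried; unlinkCall() clears the pointer check when the callee
// JSFunction goes away.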
#if ENABLE(JIT_OPTIMIZE_CALL)
void JIT::unlinkCall(CallLinkInfo* callLinkInfo)
{
    // When the JSFunction is deleted the pointer embedded in the instruction stream will no longer be valid
    // (and, if a new JSFunction happened to be constructed at the same location, we could get a false positive
    // match). Reset the check so it no longer matches.
    RepatchBuffer repatchBuffer(callLinkInfo->ownerCodeBlock.get());
#if USE(JSVALUE32_64)
    repatchBuffer.repatch(callLinkInfo->hotPathBegin, 0);
#else
    repatchBuffer.repatch(callLinkInfo->hotPathBegin, JSValue::encode(JSValue()));
#endif
}

void JIT::linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JITCode& code, CallLinkInfo* callLinkInfo, int callerArgCount, JSGlobalData* globalData)
{
    ASSERT(calleeCodeBlock);
    RepatchBuffer repatchBuffer(callerCodeBlock);

    // Currently we only link calls with the exact number of arguments.
    // If this is a native call calleeCodeBlock is null so the number of parameters is unimportant.
    if (callerArgCount == calleeCodeBlock->m_numParameters || calleeCodeBlock->codeType() == NativeCode) {
        ASSERT(!callLinkInfo->isLinked());

        if (calleeCodeBlock)
            calleeCodeBlock->addCaller(callLinkInfo);

        repatchBuffer.repatch(callLinkInfo->hotPathBegin, callee);
        repatchBuffer.relink(callLinkInfo->hotPathOther, code.addressForCall());
    }

    // Patch the call so we do not continue to try to link.
    repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->jitStubs.ctiVirtualCall());
}
#endif // ENABLE(JIT_OPTIMIZE_CALL)

} // namespace JSC

#endif // ENABLE(JIT)