/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITInlineMethods_h
#define JITInlineMethods_h

#include <wtf/Platform.h>

#if ENABLE(JIT)

namespace JSC {

/* Deprecated: Please use JITStubCall instead. */

// Puts an argument onto the stack, as an argument to a context threaded function.
ALWAYS_INLINE void JIT::emitPutJITStubArg(RegisterID src, unsigned argumentNumber)
{
    poke(src, argumentNumber);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber)
{
    poke(Imm32(value), argumentNumber);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(void* value, unsigned argumentNumber)
{
    poke(ImmPtr(value), argumentNumber);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
{
    peek(dst, argumentNumber);
}
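
// Note: broadly, these helpers pass arguments to JIT stub functions by writing
// them into stack slots relative to the stack pointer (poke) and reading them
// back (peek); exactly how the C stub sees those slots depends on the
// platform's JIT stub argument convention.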

ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(from, Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(ImmPtr(value), Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeIndex, function.executableAddress()));
    return nakedCall;
}
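
// Note: the near call emitted above is not linked to its target here; the
// CallRecord captures the call site, the current bytecode index, and the
// intended target so that the call can be patched in when the generated code
// is finalized and linked.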

#if PLATFORM(X86) || PLATFORM(X86_64) || (PLATFORM(ARM) && !PLATFORM_ARM_ARCH(7))

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    pop(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    push(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    push(address);
}

#elif PLATFORM_ARM_ARCH(7)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(linkRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, linkRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, linkRegister);
}

#endif
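
// Note: the two variants above reflect where a call leaves the return address:
// on x86/x86-64 (and classic ARM in this configuration) it is on the stack, so
// it is pop'd into a register and push'd back before returning; on ARMv7 it
// lives in the link register, so plain moves and loads suffice.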

#if USE(JIT_STUB_ARGUMENT_VA_LIST)
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
}
ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline() {}
#else
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    move(stackPointerRegister, firstArgumentRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
#if PLATFORM(ARM) && !PLATFORM_ARM_ARCH(7)
    move(ctiReturnRegister, ARM::lr);
#endif
}
ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if PLATFORM(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(Imm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif PLATFORM_ARM_ARCH(7)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}
#endif

ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(structure));
}
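
// Note: this is the guard used by the property access fast paths: compare the
// cell's Structure pointer against an expected Structure and branch to a slow
// case on mismatch. Repatching the expected pointer is what keeps the inline
// caches valid as objects change shape.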

ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeIndex));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeIndex));
}
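
// Note: slow-case jumps are not linked when they are added; each SlowCaseEntry
// records the jump together with its bytecode index so that the later
// slow-path pass can link them, in order, via linkSlowCase().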

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeIndex + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    jump.linkTo(m_labels[m_bytecodeIndex + relativeOffset], this);
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(Imm32(1u << (flag - 1)), AbsoluteAddress(&SamplingFlags::s_flags));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(Imm32(~(1u << (flag - 1))), AbsoluteAddress(&SamplingFlags::s_flags));
}
#endif

#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, uint32_t count)
{
#if PLATFORM(X86_64) // Or any other 64-bit platform.
    addPtr(Imm32(count), AbsoluteAddress(&counter.m_counter));
#elif PLATFORM(X86) // Or any other little-endian 32-bit platform.
    intptr_t hiWord = reinterpret_cast<intptr_t>(&counter.m_counter) + sizeof(int32_t);
    add32(Imm32(count), AbsoluteAddress(&counter.m_counter));
    addWithCarry32(Imm32(0), AbsoluteAddress(reinterpret_cast<void*>(hiWord)));
#else
#error "SAMPLING_COUNTERS not implemented on this platform."
#endif
}
#endif
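
// Note: on 32-bit x86 the 64-bit counter is bumped as a two-word addition,
// roughly:
//
//     add [counter.lo], count   // low word takes the increment
//     adc [counter.hi], 0       // high word absorbs the carry
//
// which is why the code computes hiWord and assumes a little-endian layout.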

#if ENABLE(OPCODE_SAMPLING)
#if PLATFORM(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(ImmPtr(m_interpreter->sampler()->sampleSlot()), X86::ecx);
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if PLATFORM(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(ImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86::ecx);
    storePtr(ImmPtr(codeBlock), X86::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(ImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

#if USE(JSVALUE32_64)

inline JIT::Address JIT::tagFor(unsigned index, RegisterID base)
{
    return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.tag));
}

inline JIT::Address JIT::payloadFor(unsigned index, RegisterID base)
{
    return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.payload));
}

inline JIT::Address JIT::addressFor(unsigned index, RegisterID base)
{
    return Address(base, (index * sizeof(Register)));
}
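
// Note: under JSVALUE32_64 a virtual register is a 64-bit Register slot holding
// a JSValue split into a 32-bit payload and a 32-bit type tag (payload first on
// little-endian targets). tagFor()/payloadFor() address the two halves
// individually; addressFor() addresses the slot as a whole, e.g. for doubles.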

inline void JIT::emitLoadTag(unsigned index, RegisterID tag)
{
    RegisterID mappedTag;
    if (getMappedTag(index, mappedTag)) {
        move(mappedTag, tag);
        unmap(tag);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        unmap(tag);
        return;
    }

    load32(tagFor(index), tag);
    unmap(tag);
}

inline void JIT::emitLoadPayload(unsigned index, RegisterID payload)
{
    RegisterID mappedPayload;
    if (getMappedPayload(index, mappedPayload)) {
        move(mappedPayload, payload);
        unmap(payload);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        unmap(payload);
        return;
    }

    load32(payloadFor(index), payload);
    unmap(payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    ASSERT(tag != payload);

    if (base == callFrameRegister) {
        ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

inline void JIT::emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2)
{
    if (isMapped(index1)) {
        emitLoad(index1, tag1, payload1);
        emitLoad(index2, tag2, payload2);
        return;
    }
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}
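
// Note: emitLoad2 orders the two loads so that an operand whose tag/payload
// may still be cached in machine registers (see map() below) is loaded first,
// taking the cheap register moves before the second load can unmap those
// registers.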

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreInt32(unsigned index, Imm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(Imm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(unsigned index, RegisterID tag, bool indexIsBool)
{
    if (!indexIsBool)
        store32(Imm32(0), payloadFor(index, callFrameRegister));
    store32(tag, tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(unsigned index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(unsigned index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}

inline bool JIT::isLabeled(unsigned bytecodeIndex)
{
    for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
        unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
        if (jumpTarget == bytecodeIndex)
            return true;
        if (jumpTarget > bytecodeIndex)
            return false;
    }
    return false;
}

inline void JIT::map(unsigned bytecodeIndex, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload)
{
    if (isLabeled(bytecodeIndex))
        return;

    m_mappedBytecodeIndex = bytecodeIndex;
    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
    m_mappedTag = tag;
    m_mappedPayload = payload;
}
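
// Note: map()/unmap() implement a single-entry cache over the register file:
// they record that, at the current bytecode index, one virtual register's tag
// and payload are already live in particular machine registers, turning
// reloads into register moves. Jump targets are deliberately never mapped,
// since control may reach them along paths where those registers hold
// something else.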

inline void JIT::unmap(RegisterID registerID)
{
    if (m_mappedTag == registerID)
        m_mappedTag = (RegisterID)-1;
    else if (m_mappedPayload == registerID)
        m_mappedPayload = (RegisterID)-1;
}

inline void JIT::unmap()
{
    m_mappedBytecodeIndex = (unsigned)-1;
    m_mappedVirtualRegisterIndex = (unsigned)-1;
    m_mappedTag = (RegisterID)-1;
    m_mappedPayload = (RegisterID)-1;
}

inline bool JIT::isMapped(unsigned virtualRegisterIndex)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    return true;
}

inline bool JIT::getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedPayload == (RegisterID)-1)
        return false;
    payload = m_mappedPayload;
    return true;
}

inline bool JIT::getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedTag == (RegisterID)-1)
        return false;
    tag = m_mappedTag;
    return true;
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        addSlowCase(branch32(NotEqual, tagFor(virtualRegisterIndex), Imm32(JSValue::CellTag)));
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        addSlowCase(branch32(NotEqual, tag, Imm32(JSValue::CellTag)));
}

inline void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        linkSlowCase(iter);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch1, RegisterID scratch2)
{
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue constant = m_codeBlock->getConstant(src);
        poke(Imm32(constant.payload()), argumentNumber);
        poke(Imm32(constant.tag()), argumentNumber + 1);
    } else {
        emitLoad(src, scratch1, scratch2);
        poke(scratch2, argumentNumber);
        poke(scratch1, argumentNumber + 1);
    }
}

#else // USE(JSVALUE32_64)

ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}

// Loads an argument from the register file into a hardware register.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        move(ImmPtr(JSValue::encode(value)), dst);
        killLastResultRegister();
        return;
    }

    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src)) {
        bool atJumpTarget = false;
        while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeIndex) {
            if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeIndex)
                atJumpTarget = true;
            ++m_jumpTargetsPosition;
        }

        if (!atJumpTarget) {
            // The value we want is already in the cached result register.
            if (dst != cachedResultRegister)
                move(cachedResultRegister, dst);
            killLastResultRegister();
            return;
        }
    }

    loadPtr(Address(callFrameRegister, src * sizeof(Register)), dst);
    killLastResultRegister();
}
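
// Note: m_lastResultBytecodeRegister is a one-slot cache recording which
// virtual register the previous opcode left in cachedResultRegister; the check
// above lets consecutive opcodes hand a temporary from one to the next in a
// register rather than through the call frame, except at jump targets, where
// other control flow may join.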

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    if (src2 == m_lastResultBytecodeRegister) {
        emitGetVirtualRegister(src2, dst2);
        emitGetVirtualRegister(src1, dst1);
    } else {
        emitGetVirtualRegister(src1, dst1);
        emitGetVirtualRegister(src2, dst2);
    }
}

ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? dst : std::numeric_limits<int>::max();
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    storePtr(ImmPtr(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(Zero, reg, tagMaskRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    orPtr(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(NonZero, reg, tagMaskRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

#if USE(JSVALUE64)
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateNumber(RegisterID reg)
{
    return branchTestPtr(NonZero, reg, tagTypeNumberRegister);
}
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateNumber(RegisterID reg)
{
    return branchTestPtr(Zero, reg, tagTypeNumberRegister);
}
#endif

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(AboveOrEqual, reg, tagTypeNumberRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(Below, reg, tagTypeNumberRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    andPtr(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

#if !USE(JSVALUE64)
ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg)
{
    subPtr(Imm32(JSImmediate::TagTypeNumber), reg);
}

ALWAYS_INLINE JIT::Jump JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg)
{
    return branchSubPtr(Zero, Imm32(JSImmediate::TagTypeNumber), reg);
}
#endif

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    emitFastArithIntToImmNoCheck(src, dest);
#else
    if (src != dest)
        move(src, dest);
    addPtr(Imm32(JSImmediate::TagTypeNumber), dest);
#endif
}

ALWAYS_INLINE void JIT::emitFastArithImmToInt(RegisterID reg)
{
#if USE(JSVALUE64)
    UNUSED_PARAM(reg);
#else
    rshiftPtr(Imm32(JSImmediate::IntegerPayloadShift), reg);
#endif
}

// operand is int32_t, must have been zero-extended if register is 64-bit.
ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    if (src != dest)
        move(src, dest);
    orPtr(tagTypeNumberRegister, dest);
#else
    signExtend32ToPtr(src, dest);
    addPtr(dest, dest);
    emitFastArithReTagImmediate(dest, dest);
#endif
}
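
// Note: under JSVALUE64 an int32 immediate is encoded by OR'ing the value with
// the tag bits held in tagTypeNumberRegister, so re-tagging is a single OR; in
// the 32-bit JSImmediate encoding the value is instead shifted left one bit
// (here via addPtr(dest, dest)) and the low tag bit added by
// emitFastArithReTagImmediate().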

ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    lshift32(Imm32(JSImmediate::ExtendedPayloadShift), reg);
    or32(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), reg);
}

/* Deprecated: Please use JITStubCall instead. */

// Puts an argument from the register file onto the stack, as an argument to a context threaded function.
ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch)
{
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        emitPutJITStubArgConstant(JSValue::encode(value), argumentNumber);
    } else {
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), scratch);
        emitPutJITStubArg(scratch, argumentNumber);
    }

    killLastResultRegister();
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)

#endif