/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITInlineMethods_h
#define JITInlineMethods_h

#include <wtf/Platform.h>

#if ENABLE(JIT)

namespace JSC {

/* Deprecated: Please use JITStubCall instead. */

// puts an arg onto the stack, as an arg to a context threaded function.
ALWAYS_INLINE void JIT::emitPutJITStubArg(RegisterID src, unsigned argumentNumber)
{
    poke(src, argumentNumber);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber)
{
    poke(Imm32(value), argumentNumber);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(void* value, unsigned argumentNumber)
{
    poke(ImmPtr(value), argumentNumber);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
{
    peek(dst, argumentNumber);
}

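// Illustrative sketch of the replacement API (an assumption based on the
// JITStubCall helper this file points to; cti_op_add and the regT* names are
// examples, not prescribed usage):
//
//     JITStubCall stubCall(this, cti_op_add);
//     stubCall.addArgument(regT1);
//     stubCall.addArgument(regT0);
//     stubCall.call(result);
//
// JITStubCall pokes its arguments and emits the call in one step, which
// avoids the mismatched hand-numbered argument slots these helpers invite.
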
ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(from, Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(ImmPtr(value), Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeIndex, function.executableAddress()));
    return nakedCall;
}

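// Note: nearCall() is emitted with no target; recording it in m_calls lets
// the link phase patch in function's executable address once the generated
// code's final location is known.
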
#if PLATFORM(X86) || PLATFORM(X86_64)

// On x86 and x86-64 the return address lives on the stack, so it is moved
// to and from a register with pop/push.
ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    pop(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    push(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    push(address);
}

#elif PLATFORM_ARM_ARCH(7)

// On ARMv7 the return address is held in the link register rather than on
// the stack.
ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(linkRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, linkRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, linkRegister);
}

#endif

#if USE(JIT_STUB_ARGUMENT_VA_LIST)
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
}

ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline() {}
#else
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    move(stackPointerRegister, firstArgumentRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
}

ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if PLATFORM(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(Imm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif PLATFORM_ARM_ARCH(7)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}
#endif

ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(structure));
}

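// Note: checkStructure() is the guard used by the property-access inline
// caches; the returned jump is taken when the cell's Structure does not match
// the expected one, sending execution to a slow path.
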
ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeIndex));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeIndex));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeIndex + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    jump.linkTo(m_labels[m_bytecodeIndex + relativeOffset], this);
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(Imm32(1u << (flag - 1)), AbsoluteAddress(&SamplingFlags::s_flags));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(Imm32(~(1u << (flag - 1))), AbsoluteAddress(&SamplingFlags::s_flags));
}
#endif

#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, uint32_t count)
{
#if PLATFORM(X86_64) // Or any other 64-bit platform.
    addPtr(Imm32(count), AbsoluteAddress(&counter.m_counter));
#elif PLATFORM(X86) // Or any other little-endian 32-bit platform.
    // The counter is 64 bits wide; add to the low word, then propagate the
    // carry into the high word.
    intptr_t hiWord = reinterpret_cast<intptr_t>(&counter.m_counter) + sizeof(int32_t);
    add32(Imm32(count), AbsoluteAddress(&counter.m_counter));
    addWithCarry32(Imm32(0), AbsoluteAddress(reinterpret_cast<void*>(hiWord)));
#else
#error "SAMPLING_COUNTERS not implemented on this platform."
#endif
}
#endif

#if ENABLE(OPCODE_SAMPLING)
#if PLATFORM(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(ImmPtr(m_interpreter->sampler()->sampleSlot()), X86::ecx);
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if PLATFORM(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(ImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86::ecx);
    storePtr(ImmPtr(codeBlock), X86::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(ImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

#if USE(JSVALUE32_64)

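// With the JSVALUE32_64 encoding, each virtual register occupies a full
// 8-byte Register slot split into a 32-bit tag and a 32-bit payload
// (JSValue's u.asBits). The helpers below compute the address of each half.
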
inline JIT::Address JIT::tagFor(unsigned index, RegisterID base)
{
    return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.tag));
}

inline JIT::Address JIT::payloadFor(unsigned index, RegisterID base)
{
    return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.payload));
}

inline JIT::Address JIT::addressFor(unsigned index, RegisterID base)
{
    return Address(base, (index * sizeof(Register)));
}

inline void JIT::emitLoadTag(unsigned index, RegisterID tag)
{
    RegisterID mappedTag;
    if (getMappedTag(index, mappedTag)) {
        move(mappedTag, tag);
        unmap(tag);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        unmap(tag);
        return;
    }

    load32(tagFor(index), tag);
    unmap(tag);
}

inline void JIT::emitLoadPayload(unsigned index, RegisterID payload)
{
    RegisterID mappedPayload;
    if (getMappedPayload(index, mappedPayload)) {
        move(mappedPayload, payload);
        unmap(payload);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        unmap(payload);
        return;
    }

    load32(payloadFor(index), payload);
    unmap(payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    ASSERT(tag != payload);

    if (base == callFrameRegister) {
        ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

inline void JIT::emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2)
{
    // Load the mapped operand first, so loading the other operand cannot
    // clobber the registers the mapping refers to.
    if (isMapped(index1)) {
        emitLoad(index1, tag1, payload1);
        emitLoad(index2, tag2, payload2);
        return;
    }
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreInt32(unsigned index, Imm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(Imm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(unsigned index, RegisterID tag, bool indexIsBool)
{
    if (!indexIsBool)
        store32(Imm32(0), payloadFor(index, callFrameRegister));
    store32(tag, tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(unsigned index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(unsigned index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}

inline bool JIT::isLabeled(unsigned bytecodeIndex)
{
    for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
        unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
        if (jumpTarget == bytecodeIndex)
            return true;
        if (jumpTarget > bytecodeIndex)
            return false;
    }
    return false;
}

inline void JIT::map(unsigned bytecodeIndex, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload)
{
    if (isLabeled(bytecodeIndex))
        return;

    m_mappedBytecodeIndex = bytecodeIndex;
    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
    m_mappedTag = tag;
    m_mappedPayload = payload;
}

inline void JIT::unmap(RegisterID registerID)
{
    if (m_mappedTag == registerID)
        m_mappedTag = (RegisterID)-1;
    else if (m_mappedPayload == registerID)
        m_mappedPayload = (RegisterID)-1;
}

inline void JIT::unmap()
{
    m_mappedBytecodeIndex = (unsigned)-1;
    m_mappedVirtualRegisterIndex = (unsigned)-1;
    m_mappedTag = (RegisterID)-1;
    m_mappedPayload = (RegisterID)-1;
}

inline bool JIT::isMapped(unsigned virtualRegisterIndex)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    return true;
}

inline bool JIT::getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedPayload == (RegisterID)-1)
        return false;
    payload = m_mappedPayload;
    return true;
}

inline bool JIT::getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedTag == (RegisterID)-1)
        return false;
    tag = m_mappedTag;
    return true;
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        addSlowCase(branch32(NotEqual, tagFor(virtualRegisterIndex), Imm32(JSValue::CellTag)));
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        addSlowCase(branch32(NotEqual, tag, Imm32(JSValue::CellTag)));
}

inline void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        linkSlowCase(iter);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch1, RegisterID scratch2)
{
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue constant = m_codeBlock->getConstant(src);
        poke(Imm32(constant.payload()), argumentNumber);
        poke(Imm32(constant.tag()), argumentNumber + 1);
    } else {
        emitLoad(src, scratch1, scratch2);
        poke(scratch2, argumentNumber);
        poke(scratch1, argumentNumber + 1);
    }
}

#else // USE(JSVALUE32_64)

ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}

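// In the single-register value encodings the JIT remembers which virtual
// register was last written through cachedResultRegister (eax on x86), so a
// following read of the same temporary can reuse the register instead of
// reloading from the call frame; killLastResultRegister() discards that
// knowledge whenever the register may have been clobbered.
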
// get arg puts an arg from the SF register array into a h/w register
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        move(ImmPtr(JSValue::encode(value)), dst);
        killLastResultRegister();
        return;
    }

    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src)) {
        bool atJumpTarget = false;
        while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeIndex) {
            if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeIndex)
                atJumpTarget = true;
            ++m_jumpTargetsPosition;
        }

        if (!atJumpTarget) {
            // The argument we want is already stored in eax
            if (dst != cachedResultRegister)
                move(cachedResultRegister, dst);
            killLastResultRegister();
            return;
        }
    }

    loadPtr(Address(callFrameRegister, src * sizeof(Register)), dst);
    killLastResultRegister();
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    if (src2 == m_lastResultBytecodeRegister) {
        emitGetVirtualRegister(src2, dst2);
        emitGetVirtualRegister(src1, dst1);
    } else {
        emitGetVirtualRegister(src1, dst1);
        emitGetVirtualRegister(src2, dst2);
    }
}

ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? dst : std::numeric_limits<int>::max();
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    storePtr(ImmPtr(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(Zero, reg, tagMaskRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    orPtr(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(NonZero, reg, tagMaskRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

#if USE(JSVALUE64)
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateNumber(RegisterID reg)
{
    return branchTestPtr(NonZero, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateNumber(RegisterID reg)
{
    return branchTestPtr(Zero, reg, tagTypeNumberRegister);
}
#endif

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(AboveOrEqual, reg, tagTypeNumberRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(Below, reg, tagTypeNumberRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    andPtr(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

#if !USE(JSVALUE64)
ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg)
{
    subPtr(Imm32(JSImmediate::TagTypeNumber), reg);
}

ALWAYS_INLINE JIT::Jump JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg)
{
    return branchSubPtr(Zero, Imm32(JSImmediate::TagTypeNumber), reg);
}
#endif

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    emitFastArithIntToImmNoCheck(src, dest);
#else
    if (src != dest)
        move(src, dest);
    addPtr(Imm32(JSImmediate::TagTypeNumber), dest);
#endif
}

ALWAYS_INLINE void JIT::emitFastArithImmToInt(RegisterID reg)
{
#if USE(JSVALUE64)
    UNUSED_PARAM(reg);
#else
    rshiftPtr(Imm32(JSImmediate::IntegerPayloadShift), reg);
#endif
}

// operand is int32_t, must have been zero-extended if register is 64-bit.
ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    if (src != dest)
        move(src, dest);
    orPtr(tagTypeNumberRegister, dest);
#else
    signExtend32ToPtr(src, dest);
    addPtr(dest, dest); // shift the payload left one bit to make room for the tag
    emitFastArithReTagImmediate(dest, dest);
#endif
}

ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    lshift32(Imm32(JSImmediate::ExtendedPayloadShift), reg);
    or32(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), reg);
}

/* Deprecated: Please use JITStubCall instead. */

// get arg puts an arg from the SF register array onto the stack, as an arg to a context threaded function.
ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch)
{
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        emitPutJITStubArgConstant(JSValue::encode(value), argumentNumber);
    } else {
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), scratch);
        emitPutJITStubArg(scratch, argumentNumber);
    }

    killLastResultRegister();
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JITInlineMethods_h