/*
 * Copyright (C) 2008, 2012, 2013, 2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITInlines_h
#define JITInlines_h

#if ENABLE(JIT)

#include "JSCInlines.h"

namespace JSC {

#if USE(JSVALUE64)
inline MacroAssembler::JumpList JIT::emitDoubleGetByVal(Instruction* instruction, PatchableJump& badType)
{
    JumpList slowCases = emitDoubleLoad(instruction, badType);
    moveDoubleTo64(fpRegT0, regT0);
    sub64(tagTypeNumberRegister, regT0);
    return slowCases;
}
#else
inline MacroAssembler::JumpList JIT::emitDoubleGetByVal(Instruction* instruction, PatchableJump& badType)
{
    JumpList slowCases = emitDoubleLoad(instruction, badType);
    moveDoubleToInts(fpRegT0, regT0, regT1);
    return slowCases;
}
#endif // USE(JSVALUE64)
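
// Note on the JSVALUE64 path above: subtracting tagTypeNumberRegister (which holds
// TagTypeNumber) is, modulo 2^64, the same as adding the double-encode offset, so it boxes
// the raw double bits in regT0 into the 64-bit JSValue number encoding. The JSVALUE32_64
// path instead splits the double into its tag/payload halves.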

ALWAYS_INLINE MacroAssembler::JumpList JIT::emitLoadForArrayMode(Instruction* currentInstruction, JITArrayMode arrayMode, PatchableJump& badType)
{
    switch (arrayMode) {
    case JITInt32:
        return emitInt32Load(currentInstruction, badType);
    case JITDouble:
        return emitDoubleLoad(currentInstruction, badType);
    case JITContiguous:
        return emitContiguousLoad(currentInstruction, badType);
    case JITArrayStorage:
        return emitArrayStorageLoad(currentInstruction, badType);
    default:
        break;
    }
    RELEASE_ASSERT_NOT_REACHED();
    return MacroAssembler::JumpList();
}

inline MacroAssembler::JumpList JIT::emitContiguousGetByVal(Instruction* instruction, PatchableJump& badType, IndexingType expectedShape)
{
    return emitContiguousLoad(instruction, badType, expectedShape);
}

inline MacroAssembler::JumpList JIT::emitArrayStorageGetByVal(Instruction* instruction, PatchableJump& badType)
{
    return emitArrayStorageLoad(instruction, badType);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

ALWAYS_INLINE JSValue JIT::getConstantOperand(int src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

ALWAYS_INLINE void JIT::emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry entry)
{
#if USE(JSVALUE32_64)
    store32(TrustedImm32(Int32Tag), intTagFor(entry, callFrameRegister));
    store32(from, intPayloadFor(entry, callFrameRegister));
#else
    store64(from, addressFor(entry, callFrameRegister));
#endif
}

ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    failures.append(branchStructure(NotEqual, Address(src, JSCell::structureIDOffset()), m_vm->stringStructure.get()));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    failures.append(branchTest32(Zero, dst));
    loadPtr(MacroAssembler::Address(dst, StringImpl::flagsOffset()), regT1);
    loadPtr(MacroAssembler::Address(dst, StringImpl::dataOffset()), dst);

    JumpList is16Bit;
    JumpList cont8Bit;
    is16Bit.append(branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    load8(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.append(jump());
    is16Bit.link(this);
    load16(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.link(this);
}
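
// emitLoadCharacterString: given a JSString cell in src, this loads the code unit of a
// one-character string into dst. It bails out to `failures` if src is not a string, if its
// length is not 1, or if the string has no backing StringImpl yet (a rope), then reads the
// character through the 8-bit or 16-bit path depending on the StringImpl flags. regT1 is
// clobbered as a scratch register.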

ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}
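
// emitNakedCall emits a near call with no exception check or argument setup; the CallRecord
// appended to m_calls remembers the call site, the bytecode offset, and the target address so
// the call can be bound when the generated code is linked.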

ALWAYS_INLINE void JIT::updateTopCallFrame()
{
    ASSERT(static_cast<int>(m_bytecodeOffset) >= 0);
#if USE(JSVALUE32_64)
    Instruction* instruction = m_codeBlock->instructions().begin() + m_bytecodeOffset + 1;
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeInstruction(instruction);
#else
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeOffset(m_bytecodeOffset + 1);
#endif
    store32(TrustedImm32(locationBits), intTagFor(JSStack::ArgumentCount));
    storePtr(callFrameRegister, &m_vm->topCallFrame);
}
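
// updateTopCallFrame stashes the current bytecode location in the tag half of the
// ArgumentCount header slot and publishes the frame pointer to vm->topCallFrame, so a C++
// runtime function called next can see which frame and which bytecode invoked it.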

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheck(const FunctionPtr& function)
{
    updateTopCallFrame();
    MacroAssembler::Call call = appendCall(function);
    exceptionCheck();
    return call;
}

#if OS(WINDOWS) && CPU(X86_64)
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr& function)
{
    updateTopCallFrame();
    MacroAssembler::Call call = appendCallWithSlowPathReturnType(function);
    exceptionCheck();
    return call;
}
#endif

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithCallFrameRollbackOnException(const FunctionPtr& function)
{
    updateTopCallFrame(); // The callee is responsible for setting topCallFrame to their caller
    MacroAssembler::Call call = appendCall(function);
    exceptionCheckWithCallFrameRollback();
    return call;
}

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr& function, int dst)
{
    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, returnValueGPR);
#else
    emitStore(dst, returnValueGPR2, returnValueGPR);
#endif
    return call;
}

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr& function, int dst)
{
    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
    emitValueProfilingSite();
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, returnValueGPR);
#else
    emitStore(dst, returnValueGPR2, returnValueGPR);
#endif
    return call;
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}
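
// The callOperation overloads below follow the operation-type naming scheme used in
// JITOperations.h; roughly: E = ExecState*, J = EncodedJSValue, C = JSCell*, O = JSObject*,
// V = void, Z = int32_t, P = void*, S = size_t, St = Structure*, Ssi = StructureStubInfo*,
// Aap = ArrayAllocationProfile*, Ap = ArrayProfile*, Pc = Instruction* (bytecode PC),
// I = UniquedStringImpl*, Idc = const Identifier*, Cb = CodeBlock*, Jsc = JSScope*,
// Symtab = SymbolTable*. The leading letter is the return type; the rest are the argument
// types after the ExecState. (Rough decoding; see JITOperations.h for the authoritative list.)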

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EJsc operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EJscZ operation, GPRReg arg1, int32_t arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImm32(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EL operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EL operation, TrustedImmPtr arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EO operation, GPRReg arg)
{
    setupArgumentsWithExecState(arg);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_ESt operation, Structure* structure)
{
    setupArgumentsWithExecState(TrustedImmPtr(structure));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EZ operation, int32_t arg)
{
    setupArgumentsWithExecState(TrustedImm32(arg));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_E operation, int dst)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJcpZ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2, int32_t arg3)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2, TrustedImm32(arg3));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJcpZ operation, int dst, ArrayAllocationProfile* arg1, const JSValue* arg2, int32_t arg3)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), TrustedImmPtr(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EC operation, int dst, JSCell* cell)
{
    setupArgumentsWithExecState(TrustedImmPtr(cell));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, JSCell* cell)
{
    setupArgumentsWithExecState(TrustedImmPtr(cell));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJscC operation, int dst, GPRReg arg1, JSCell* cell)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(cell));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EP operation, int dst, void* pointer)
{
    setupArgumentsWithExecState(TrustedImmPtr(pointer));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(WithProfileTag, J_JITOperation_EPc operation, int dst, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EZ operation, int dst, int32_t arg)
{
    setupArgumentsWithExecState(TrustedImm32(arg));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EZZ operation, int dst, int32_t arg1, int32_t arg2)
{
    setupArgumentsWithExecState(TrustedImm32(arg1), TrustedImm32(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EOJss operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Sprt_JITOperation_EZ operation, int32_t op)
{
#if OS(WINDOWS) && CPU(X86_64)
    setupArgumentsWithExecStateForCallWithSlowPathReturnType(TrustedImm32(op));
    return appendCallWithExceptionCheckAndSlowPathReturnType(operation);
#else
    setupArgumentsWithExecState(TrustedImm32(op));
    return appendCallWithExceptionCheck(operation);
#endif
}
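
// The Windows x64 special case above appears to exist because Sprt_* operations return a
// two-word SlowPathReturnType; the Win64 calling convention returns aggregates of that size
// through a hidden pointer argument, so argument setup and the call need the dedicated
// *SlowPathReturnType helpers there. (Summary of the apparent intent of this code path.)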

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EE operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    updateTopCallFrame();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EPc operation, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZ operation, int32_t op)
{
    setupArgumentsWithExecState(TrustedImm32(op));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(J_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithCallFrameRollbackOnException(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(V_JITOperation_ECb operation, CodeBlock* pointer)
{
    setupArgumentsWithExecState(TrustedImmPtr(pointer));
    return appendCallWithCallFrameRollbackOnException(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(Z_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithCallFrameRollbackOnException(operation);
}

#if USE(JSVALUE64)

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Z_JITOperation_EJZZ operation, GPRReg arg1, int32_t arg2, int32_t arg3)
{
    setupArgumentsWithExecState(arg1, TrustedImm32(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EFJZZ operation, GPRReg arg1, GPRReg arg2, int32_t arg3, GPRReg arg4)
{
    setupArgumentsWithExecState(arg1, arg2, TrustedImm32(arg3), arg4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, RegisterID regOp1, RegisterID regOp2, UniquedStringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), regOp1, regOp2, TrustedImmPtr(uid));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJ operation, RegisterID regOp1, RegisterID regOp2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, regOp2, regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJAp operation, RegisterID regOp1, RegisterID regOp2, RegisterID regOp3, ArrayProfile* arrayProfile)
{
    setupArgumentsWithExecState(regOp1, regOp2, regOp3, TrustedImmPtr(arrayProfile));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZJ operation, int dst, GPRReg arg)
{
    setupArgumentsWithExecState(TrustedImm32(dst), arg);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_ESsiJI operation, int dst, StructureStubInfo* stubInfo, GPRReg arg1, UniquedStringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1, TrustedImmPtr(uid));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_EJJ operation, int dst, GPRReg arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, arg2);
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJ operation, int dst, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJIdc operation, int dst, GPRReg arg1, const Identifier* arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJ operation, int dst, GPRReg arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJAp operation, int dst, GPRReg arg1, GPRReg arg2, ArrayProfile* arrayProfile)
{
    setupArgumentsWithExecState(arg1, arg2, TrustedImmPtr(arrayProfile));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(V_JITOperation_EJ operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1, size_t arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJ operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJJ operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZSymtabJ operation, int op1, SymbolTable* symbolTable, RegisterID regOp3)
{
    setupArgumentsWithExecState(TrustedImm32(op1), TrustedImmPtr(symbolTable), regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJIdJ operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJIdJJ operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3, regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1, int32_t op2)
{
    setupArgumentsWithExecState(regOp1, TrustedImm32(op2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1, int32_t op2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, TrustedImm32(op2), regOp3);
    return appendCallWithExceptionCheck(operation);
}

#else // USE(JSVALUE32_64)

// EncodedJSValue in JSVALUE32_64 is a 64-bit integer. When compiled for ARM EABI, it must be
// aligned to an even-numbered register (r0, r2) or to [sp]. To keep the generated calls from
// using the wrong registers, occupy r1 or r3 with a dummy argument when necessary.
#if (COMPILER_SUPPORTS(EABI) && CPU(ARM)) || CPU(MIPS)
#define EABI_32BIT_DUMMY_ARG      TrustedImm32(0),
#else
#define EABI_32BIT_DUMMY_ARG
#endif

// On SH4, a 64-bit JSVALUE32_64 EncodedJSValue cannot be passed half in an argument register
// and half on the stack. To avoid this, occupy the 4th argument register (r7) with a dummy
// argument when necessary. This must only be done when there is no other 32-bit value
// argument behind this 64-bit JSValue.
#if CPU(SH4)
#define SH4_32BIT_DUMMY_ARG       TrustedImm32(0),
#else
#define SH4_32BIT_DUMMY_ARG
#endif
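
// Usage pattern for the dummy-argument macros (illustrative): in an overload such as
//     setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, ...);
// the trailing comma baked into the macro means that, on EABI/MIPS, a TrustedImm32(0) filler
// is inserted before the payload so the 64-bit tag/payload pair lands on an even register
// boundary; on other targets the macro expands to nothing and the call is unchanged.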

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(V_JITOperation_EJ operation, GPRReg arg1Tag, GPRReg arg1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Z_JITOperation_EJZZ operation, GPRReg arg1Tag, GPRReg arg1Payload, int32_t arg2, int32_t arg3)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImm32(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EFJZZ operation, GPRReg arg1, GPRReg arg2Tag, GPRReg arg2Payload, int32_t arg3, GPRReg arg4)
{
    setupArgumentsWithExecState(arg1, arg2Payload, arg2Tag, TrustedImm32(arg3), arg4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_ESsiJI operation, int dst, StructureStubInfo* stubInfo, GPRReg arg1Tag, GPRReg arg1Payload, UniquedStringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1Payload, arg1Tag, TrustedImmPtr(uid));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJIdc operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, const Identifier* arg2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJAp operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload, ArrayProfile* arrayProfile)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag, TrustedImmPtr(arrayProfile));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_EJJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1Tag, GPRReg arg1Payload, size_t arg2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJ operation, RegisterID argTag, RegisterID argPayload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG argPayload, argTag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJJ operation, RegisterID arg1Tag, RegisterID arg1Payload, RegisterID arg2Tag, RegisterID arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECIC operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECICC operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3, regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZSymtabJ operation, int32_t op1, SymbolTable* symbolTable, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(TrustedImm32(op1), TrustedImmPtr(symbolTable), EABI_32BIT_DUMMY_ARG regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, UniquedStringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), regOp1Payload, regOp1Tag, regOp2Payload, regOp2Tag, TrustedImmPtr(uid));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, SH4_32BIT_DUMMY_ARG regOp2Payload, regOp2Tag, regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJAp operation, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, RegisterID regOp3Tag, RegisterID regOp3Payload, ArrayProfile* arrayProfile)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, SH4_32BIT_DUMMY_ARG regOp2Payload, regOp2Tag, regOp3Payload, regOp3Tag, TrustedImmPtr(arrayProfile));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZJ operation, int dst, RegisterID regOp1Tag, RegisterID regOp1Payload)
{
    setupArgumentsWithExecState(TrustedImm32(dst), regOp1Payload, regOp1Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2), EABI_32BIT_DUMMY_ARG regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

#undef EABI_32BIT_DUMMY_ARG
#undef SH4_32BIT_DUMMY_ARG

#endif // USE(JSVALUE32_64)

ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchStructure(NotEqual, Address(reg, JSCell::structureIDOffset()), structure);
}

ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase()
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Jump emptyJump; // Doing it this way to make Windows happy.
    m_slowCases.append(SlowCaseEntry(emptyJump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}
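
// Note the asymmetry above: addJump records the jump and its target bytecode offset in
// m_jmpTable so it can be resolved once the target's machine-code label exists, whereas
// emitJumpSlowToHot can bind the jump immediately because the hot-path label for that
// bytecode offset is already in m_labels by the time slow paths are generated.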

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfCellObject(RegisterID cellReg)
{
    return branch8(AboveOrEqual, Address(cellReg, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfCellNotObject(RegisterID cellReg)
{
    return branch8(Below, Address(cellReg, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
#endif

#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, int32_t count)
{
    add64(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
}
#endif

#if ENABLE(OPCODE_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(TrustedImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(TrustedImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(codeBlock), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(TrustedImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}

template<typename StructureType>
inline void JIT::emitAllocateJSObject(RegisterID allocator, StructureType structure, RegisterID result, RegisterID scratch)
{
    loadPtr(Address(allocator, MarkedAllocator::offsetOfFreeListHead()), result);
    addSlowCase(branchTestPtr(Zero, result));

    // remove the object from the free list
    loadPtr(Address(result), scratch);
    storePtr(scratch, Address(allocator, MarkedAllocator::offsetOfFreeListHead()));

    // initialize the object's property storage pointer
    storePtr(TrustedImmPtr(0), Address(result, JSObject::butterflyOffset()));

    // initialize the object's structure
    emitStoreStructureWithTypeInfo(structure, result, scratch);
}
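
// emitAllocateJSObject is a free-list bump allocation: grab the head of the allocator's free
// list, fall back to the slow case if the list is empty, pop the head by storing the next
// cell back as the new head, then clear the butterfly and install the structure. Callers are
// expected to handle the matching slow case with an out-of-line allocation.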

inline void JIT::emitValueProfilingSite(ValueProfile* valueProfile)
{
    ASSERT(shouldEmitProfiling());
    ASSERT(valueProfile);

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif

    // We're in a simple configuration: only one bucket, so we can just do a direct
    // store.
#if USE(JSVALUE64)
    store64(value, valueProfile->m_buckets);
#else
    EncodedValueDescriptor* descriptor = bitwise_cast<EncodedValueDescriptor*>(valueProfile->m_buckets);
    store32(value, &descriptor->asBits.payload);
    store32(valueTag, &descriptor->asBits.tag);
#endif
}

inline void JIT::emitValueProfilingSite(unsigned bytecodeOffset)
{
    if (!shouldEmitProfiling())
        return;
    emitValueProfilingSite(m_codeBlock->valueProfileForBytecodeOffset(bytecodeOffset));
}

inline void JIT::emitValueProfilingSite()
{
    emitValueProfilingSite(m_bytecodeOffset);
}

inline void JIT::emitArrayProfilingSiteWithCell(RegisterID cell, RegisterID indexingType, ArrayProfile* arrayProfile)
{
    if (shouldEmitProfiling()) {
        load32(MacroAssembler::Address(cell, JSCell::structureIDOffset()), indexingType);
        store32(indexingType, arrayProfile->addressOfLastSeenStructureID());
    }

    load8(Address(cell, JSCell::indexingTypeOffset()), indexingType);
}

inline void JIT::emitArrayProfilingSiteForBytecodeIndexWithCell(RegisterID cell, RegisterID indexingType, unsigned bytecodeIndex)
{
    emitArrayProfilingSiteWithCell(cell, indexingType, m_codeBlock->getOrAddArrayProfile(bytecodeIndex));
}

inline void JIT::emitArrayProfileStoreToHoleSpecialCase(ArrayProfile* arrayProfile)
{
    store8(TrustedImm32(1), arrayProfile->addressOfMayStoreToHole());
}

inline void JIT::emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile* arrayProfile)
{
    store8(TrustedImm32(1), arrayProfile->addressOfOutOfBounds());
}

static inline bool arrayProfileSaw(ArrayModes arrayModes, IndexingType capability)
{
    return arrayModesInclude(arrayModes, capability);
}

inline JITArrayMode JIT::chooseArrayMode(ArrayProfile* profile)
{
    ConcurrentJITLocker locker(m_codeBlock->m_lock);
    profile->computeUpdatedPrediction(locker, m_codeBlock);
    ArrayModes arrayModes = profile->observedArrayModes(locker);
    if (arrayProfileSaw(arrayModes, DoubleShape))
        return JITDouble;
    if (arrayProfileSaw(arrayModes, Int32Shape))
        return JITInt32;
    if (arrayProfileSaw(arrayModes, ArrayStorageShape))
        return JITArrayStorage;
    return JITContiguous;
}
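
// chooseArrayMode refreshes the profile under the code block's lock and then picks a JIT
// array mode from what the profile has observed, checking DoubleShape before Int32Shape
// (presumably because an array that has ever held doubles needs the double path), then
// falling back to ArrayStorage and finally Contiguous when nothing more specific was seen.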

#if USE(JSVALUE32_64)

inline void JIT::emitLoadTag(int index, RegisterID tag)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        return;
    }

    load32(tagFor(index), tag);
}

inline void JIT::emitLoadPayload(int index, RegisterID payload)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        return;
    }

    load32(payloadFor(index), payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    RELEASE_ASSERT(tag != payload);

    if (base == callFrameRegister) {
        RELEASE_ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

inline void JIT::emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2)
{
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}

inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(TrustedImmPtr(&inConstantPool), value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(int index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(int index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(TrustedImm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(int index, RegisterID payload, bool indexIsBool)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsBool)
        store32(TrustedImm32(JSValue::BooleanTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(int index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(int index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(int dst)
{
    emitStore(dst, jsUndefined());
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, TrustedImm32(JSValue::CellTag)));
    }
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(int op1, int op2, int& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

#else // USE(JSVALUE32_64)

// "get" puts an argument from the virtual register file into a hardware register.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        if (!value.isNumber())
            move(TrustedImm64(JSValue::encode(value)), dst);
        else
            move(Imm64(JSValue::encode(value)), dst);
        return;
    }

    load64(Address(callFrameRegister, src * sizeof(Register)), dst);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegister(VirtualRegister src, RegisterID dst)
{
    emitGetVirtualRegister(src.offset(), dst);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    emitGetVirtualRegister(src1, dst1);
    emitGetVirtualRegister(src2, dst2);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(VirtualRegister src1, RegisterID dst1, VirtualRegister src2, RegisterID dst2)
{
    emitGetVirtualRegisters(src1.offset(), dst1, src2.offset(), dst2);
}

ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(int src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(int dst, RegisterID from)
{
    store64(from, Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(VirtualRegister dst, RegisterID from)
{
    emitPutVirtualRegister(dst.offset(), from);
}

ALWAYS_INLINE void JIT::emitInitRegister(int dst)
{
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
    return branchTest64(Zero, reg, tagMaskRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    or64(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(TrustedImmPtr(&inConstantPool), value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        ASSERT(isOperandConstantImmediateInt(index));
        convertInt32ToDouble(Imm32(getConstantOperand(index).asInt32()), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
    return branch64(AboveOrEqual, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
    return branch64(Below, reg, tagTypeNumberRegister);
}
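
// In the JSVALUE64 encoding an immediate int32 has all sixteen high tag bits set, so an
// unsigned comparison against tagTypeNumberRegister (which holds TagTypeNumber) is enough to
// separate immediate integers (>= TagTypeNumber) from every other value.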

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    and64(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
    emitFastArithIntToImmNoCheck(src, dest);
}

ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    or32(TrustedImm32(static_cast<int32_t>(ValueFalse)), reg);
}

#endif // USE(JSVALUE32_64)

template <typename T>
JIT::Jump JIT::branchStructure(RelationalCondition condition, T leftHandSide, Structure* structure)
{
#if USE(JSVALUE64)
    return branch32(condition, leftHandSide, TrustedImm32(structure->id()));
#else
    return branchPtr(condition, leftHandSide, TrustedImmPtr(structure));
#endif
}

template <typename T>
MacroAssembler::Jump branchStructure(MacroAssembler& jit, MacroAssembler::RelationalCondition condition, T leftHandSide, Structure* structure)
{
#if USE(JSVALUE64)
    return jit.branch32(condition, leftHandSide, MacroAssembler::TrustedImm32(structure->id()));
#else
    return jit.branchPtr(condition, leftHandSide, MacroAssembler::TrustedImmPtr(structure));
#endif
}

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JITInlines_h