/*
 * Copyright (C) 2008, 2012, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITInlines_h
#define JITInlines_h

#if ENABLE(JIT)

#include "JSCInlines.h"

namespace JSC {

ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

ALWAYS_INLINE JSValue JIT::getConstantOperand(int src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

ALWAYS_INLINE void JIT::emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry entry)
{
#if USE(JSVALUE32_64)
    store32(TrustedImm32(Int32Tag), intTagFor(entry, callFrameRegister));
    store32(from, intPayloadFor(entry, callFrameRegister));
#else
    store64(from, addressFor(entry, callFrameRegister));
#endif
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
}

#if USE(JSVALUE64)
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader64(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load64(Address(from, entry * sizeof(Register)), to);
}
#endif // USE(JSVALUE64)

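// emitLoadCharacterString() below is the fast path for reading the single character of a
// one-character string: it jumps to `failures` unless `src` is a JSString of length 1 with a
// resolved StringImpl, then loads that character (8-bit or 16-bit, per the StringImpl flags)
// into `dst`.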
ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    failures.append(branchStructure(NotEqual, Address(src, JSCell::structureIDOffset()), m_vm->stringStructure.get()));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    failures.append(branchTest32(Zero, dst));
    loadPtr(MacroAssembler::Address(dst, StringImpl::flagsOffset()), regT1);
    loadPtr(MacroAssembler::Address(dst, StringImpl::dataOffset()), dst);

    JumpList is16Bit;
    JumpList cont8Bit;
    is16Bit.append(branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    load8(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.append(jump());
    is16Bit.link(this);
    load16(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.link(this);
}

ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}

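// updateTopCallFrame() records where execution currently is before we call out to C++: the
// encoded bytecode location is stored into the tag half of the ArgumentCount header slot, and
// the current call frame is published to vm.topCallFrame so the callee sees a consistent top frame.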
ALWAYS_INLINE void JIT::updateTopCallFrame()
{
    ASSERT(static_cast<int>(m_bytecodeOffset) >= 0);
#if USE(JSVALUE32_64)
    Instruction* instruction = m_codeBlock->instructions().begin() + m_bytecodeOffset + 1;
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeInstruction(instruction);
#else
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeOffset(m_bytecodeOffset + 1);
#endif
    store32(TrustedImm32(locationBits), intTagFor(JSStack::ArgumentCount));
    storePtr(callFrameRegister, &m_vm->topCallFrame);
}

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheck(const FunctionPtr& function)
{
    updateTopCallFrame();
    MacroAssembler::Call call = appendCall(function);
    exceptionCheck();
    return call;
}

#if OS(WINDOWS) && CPU(X86_64)
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr& function)
{
    updateTopCallFrame();
    MacroAssembler::Call call = appendCallWithSlowPathReturnType(function);
    exceptionCheck();
    return call;
}
#endif

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithCallFrameRollbackOnException(const FunctionPtr& function)
{
    updateTopCallFrame(); // The callee is responsible for setting topCallFrame to their caller
    MacroAssembler::Call call = appendCall(function);
    exceptionCheckWithCallFrameRollback();
    return call;
}

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr& function, int dst)
{
    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, returnValueGPR);
#else
    emitStore(dst, returnValueGPR2, returnValueGPR);
#endif
    return call;
}

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr& function, int dst)
{
    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
    emitValueProfilingSite();
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, returnValueGPR);
#else
    emitStore(dst, returnValueGPR2, returnValueGPR);
#endif
    return call;
}

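// The callOperation() overloads below all follow the same pattern: marshal the arguments with
// setupArguments*() and emit a call to the given JIT operation, usually via
// appendCallWithExceptionCheck(). The operation typedef names encode the signature (see
// JITOperations.h); roughly, the letter before the underscore is the return kind and the letters
// after the leading E (ExecState*) describe the remaining arguments, e.g. J = EncodedJSValue,
// C = a cell pointer, Z = int32_t, P = a raw pointer, V = void.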
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EO operation, GPRReg arg)
{
    setupArgumentsWithExecState(arg);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_ESt operation, Structure* structure)
{
    setupArgumentsWithExecState(TrustedImmPtr(structure));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EZ operation, int32_t arg)
{
    setupArgumentsWithExecState(TrustedImm32(arg));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_E operation, int dst)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJcpZ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2, int32_t arg3)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2, TrustedImm32(arg3));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJcpZ operation, int dst, ArrayAllocationProfile* arg1, const JSValue* arg2, int32_t arg3)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), TrustedImmPtr(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EC operation, int dst, JSCell* cell)
{
    setupArgumentsWithExecState(TrustedImmPtr(cell));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, JSCell* cell)
{
    setupArgumentsWithExecState(TrustedImmPtr(cell));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EP operation, int dst, void* pointer)
{
    setupArgumentsWithExecState(TrustedImmPtr(pointer));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(WithProfileTag, J_JITOperation_EPc operation, int dst, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EZ operation, int dst, int32_t arg)
{
    setupArgumentsWithExecState(TrustedImm32(arg));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EOJss operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Sprt_JITOperation_EZ operation, int32_t op)
{
#if OS(WINDOWS) && CPU(X86_64)
    setupArgumentsWithExecStateForCallWithSlowPathReturnType(TrustedImm32(op));
    return appendCallWithExceptionCheckAndSlowPathReturnType(operation);
#else
    setupArgumentsWithExecState(TrustedImm32(op));
    return appendCallWithExceptionCheck(operation);
#endif
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EPc operation, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZ operation, int32_t op)
{
    setupArgumentsWithExecState(TrustedImm32(op));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(J_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithCallFrameRollbackOnException(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(J_JITOperation_EE operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(V_JITOperation_ECb operation, CodeBlock* pointer)
{
    setupArgumentsWithExecState(TrustedImmPtr(pointer));
    return appendCallWithCallFrameRollbackOnException(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(Z_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithCallFrameRollbackOnException(operation);
}

#if USE(JSVALUE64)

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EJZZ operation, GPRReg arg1, int32_t arg2, int32_t arg3)
{
    setupArgumentsWithExecState(arg1, TrustedImm32(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EFJJZ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3, int32_t arg4)
{
    setupArgumentsWithExecState(arg1, arg2, arg3, TrustedImm32(arg4));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, RegisterID regOp1, RegisterID regOp2, StringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), regOp1, regOp2, TrustedImmPtr(uid));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJ operation, RegisterID regOp1, RegisterID regOp2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, regOp2, regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_ESsiJI operation, int dst, StructureStubInfo* stubInfo, GPRReg arg1, StringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1, TrustedImmPtr(uid));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_EJJ operation, int dst, GPRReg arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, arg2);
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJ operation, int dst, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJIdc operation, int dst, GPRReg arg1, const Identifier* arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJ operation, int dst, GPRReg arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(V_JITOperation_EJ operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1, size_t arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJ operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJJ operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EIdJZ operation, const Identifier* identOp1, RegisterID regOp2, int32_t op3)
{
    setupArgumentsWithExecState(TrustedImmPtr(identOp1), regOp2, TrustedImm32(op3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJIdJJ operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3, regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1, int32_t op2)
{
    setupArgumentsWithExecState(regOp1, TrustedImm32(op2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1, int32_t op2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, TrustedImm32(op2), regOp3);
    return appendCallWithExceptionCheck(operation);
}

#else // USE(JSVALUE32_64)

// EncodedJSValue in JSVALUE32_64 is a 64-bit integer. When being compiled in ARM EABI, it must be aligned on an even-numbered register (r0, r2 or [sp]).
// To keep the generated call from using the wrong registers, occupy r1 or r3 with a dummy argument when necessary.
#if (COMPILER_SUPPORTS(EABI) && CPU(ARM)) || CPU(MIPS)
#define EABI_32BIT_DUMMY_ARG      TrustedImm32(0),
#else
#define EABI_32BIT_DUMMY_ARG
#endif

// On SH4, a 64-bit JSVALUE32_64 value cannot be passed half in an argument register and half on the stack.
// To avoid this, occupy the 4th argument register (r7) with a dummy argument when necessary. This must only be done when there
// is no other 32-bit value argument behind this 64-bit JSValue.
#if CPU(SH4)
#define SH4_32BIT_DUMMY_ARG      TrustedImm32(0),
#else
#define SH4_32BIT_DUMMY_ARG
#endif

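// As an illustration (assuming ARM EABI), callOperationNoExceptionCheck(V_JITOperation_EJ, ...)
// immediately below effectively becomes
//     setupArgumentsWithExecState(TrustedImm32(0), arg1Payload, arg1Tag);
// with the ExecState in r0 the dummy occupies r1, so the 64-bit JSValue lands in the aligned
// r2/r3 pair instead of being split across r1/r2.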
ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(V_JITOperation_EJ operation, GPRReg arg1Tag, GPRReg arg1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EJZZ operation, GPRReg arg1Tag, GPRReg arg1Payload, int32_t arg2, int32_t arg3)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImm32(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EFJJZ operation, GPRReg arg1, GPRReg arg2Tag, GPRReg arg2Payload, GPRReg arg3Tag, GPRReg arg3Payload, int32_t arg4)
{
    setupArgumentsWithExecState(arg1, arg2Payload, arg2Tag, arg3Payload, arg3Tag, TrustedImm32(arg4));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_ESsiJI operation, int dst, StructureStubInfo* stubInfo, GPRReg arg1Tag, GPRReg arg1Payload, StringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1Payload, arg1Tag, TrustedImmPtr(uid));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJIdc operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, const Identifier* arg2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_EJJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1Tag, GPRReg arg1Payload, size_t arg2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJ operation, RegisterID argTag, RegisterID argPayload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG argPayload, argTag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJJ operation, RegisterID arg1Tag, RegisterID arg1Payload, RegisterID arg2Tag, RegisterID arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECICC operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3, regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EIdJZ operation, const Identifier* identOp1, RegisterID regOp2Tag, RegisterID regOp2Payload, int32_t op3)
{
    setupArgumentsWithExecState(TrustedImmPtr(identOp1), regOp2Payload, regOp2Tag, TrustedImm32(op3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, StringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), regOp1Payload, regOp1Tag, regOp2Payload, regOp2Tag, TrustedImmPtr(uid));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, SH4_32BIT_DUMMY_ARG regOp2Payload, regOp2Tag, regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2), EABI_32BIT_DUMMY_ARG regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

#undef EABI_32BIT_DUMMY_ARG
#undef SH4_32BIT_DUMMY_ARG

#endif // USE(JSVALUE32_64)

ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchStructure(NotEqual, Address(reg, JSCell::structureIDOffset()), structure);
}

ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase()
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Jump emptyJump; // Doing it this way to make Windows happy.
    m_slowCases.append(SlowCaseEntry(emptyJump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfCellNotObject(RegisterID cellReg)
{
    return branch8(Below, Address(cellReg, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
#endif

#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, int32_t count)
{
    add64(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
}
#endif

#if ENABLE(OPCODE_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(TrustedImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(TrustedImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(codeBlock), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(TrustedImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}

template<typename StructureType>
inline void JIT::emitAllocateJSObject(RegisterID allocator, StructureType structure, RegisterID result, RegisterID scratch)
{
    loadPtr(Address(allocator, MarkedAllocator::offsetOfFreeListHead()), result);
    addSlowCase(branchTestPtr(Zero, result));

    // remove the object from the free list
    loadPtr(Address(result), scratch);
    storePtr(scratch, Address(allocator, MarkedAllocator::offsetOfFreeListHead()));

    // initialize the object's property storage pointer
    storePtr(TrustedImmPtr(0), Address(result, JSObject::butterflyOffset()));

    // initialize the object's structure
    emitStoreStructureWithTypeInfo(structure, result, scratch);
}

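// Note that emitAllocateJSObject() above only emits the fast path: when the allocator's free
// list is empty (the branchTestPtr on a null head), a slow case is recorded and the out-of-line
// path is expected to perform the actual allocation.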
inline void JIT::emitValueProfilingSite(ValueProfile* valueProfile)
{
    ASSERT(shouldEmitProfiling());
    ASSERT(valueProfile);

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif

    // We're in a simple configuration: only one bucket, so we can just do a direct
    // store.
#if USE(JSVALUE64)
    store64(value, valueProfile->m_buckets);
#else
    EncodedValueDescriptor* descriptor = bitwise_cast<EncodedValueDescriptor*>(valueProfile->m_buckets);
    store32(value, &descriptor->asBits.payload);
    store32(valueTag, &descriptor->asBits.tag);
#endif
}

inline void JIT::emitValueProfilingSite(unsigned bytecodeOffset)
{
    if (!shouldEmitProfiling())
        return;
    emitValueProfilingSite(m_codeBlock->valueProfileForBytecodeOffset(bytecodeOffset));
}

inline void JIT::emitValueProfilingSite()
{
    emitValueProfilingSite(m_bytecodeOffset);
}

inline void JIT::emitArrayProfilingSiteWithCell(RegisterID cell, RegisterID indexingType, ArrayProfile* arrayProfile)
{
    if (shouldEmitProfiling()) {
        load32(MacroAssembler::Address(cell, JSCell::structureIDOffset()), indexingType);
        store32(indexingType, arrayProfile->addressOfLastSeenStructureID());
    }

    load8(Address(cell, JSCell::indexingTypeOffset()), indexingType);
}

inline void JIT::emitArrayProfilingSiteForBytecodeIndexWithCell(RegisterID cell, RegisterID indexingType, unsigned bytecodeIndex)
{
    emitArrayProfilingSiteWithCell(cell, indexingType, m_codeBlock->getOrAddArrayProfile(bytecodeIndex));
}

inline void JIT::emitArrayProfileStoreToHoleSpecialCase(ArrayProfile* arrayProfile)
{
    store8(TrustedImm32(1), arrayProfile->addressOfMayStoreToHole());
}

inline void JIT::emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile* arrayProfile)
{
    store8(TrustedImm32(1), arrayProfile->addressOfOutOfBounds());
}

static inline bool arrayProfileSaw(ArrayModes arrayModes, IndexingType capability)
{
    return arrayModesInclude(arrayModes, capability);
}

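// chooseArrayMode() below picks the most specific JIT array mode the profile supports: a profile
// that has observed doubles selects JITDouble ahead of Int32, Int32 ahead of ArrayStorage, and
// contiguous storage is the fallback when none of those shapes were observed.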
inline JITArrayMode JIT::chooseArrayMode(ArrayProfile* profile)
{
    ConcurrentJITLocker locker(m_codeBlock->m_lock);
    profile->computeUpdatedPrediction(locker, m_codeBlock);
    ArrayModes arrayModes = profile->observedArrayModes(locker);
    if (arrayProfileSaw(arrayModes, DoubleShape))
        return JITDouble;
    if (arrayProfileSaw(arrayModes, Int32Shape))
        return JITInt32;
    if (arrayProfileSaw(arrayModes, ArrayStorageShape))
        return JITArrayStorage;
    return JITContiguous;
}

#if USE(JSVALUE32_64)

inline void JIT::emitLoadTag(int index, RegisterID tag)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        return;
    }

    load32(tagFor(index), tag);
}

inline void JIT::emitLoadPayload(int index, RegisterID payload)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        return;
    }

    load32(payloadFor(index), payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    RELEASE_ASSERT(tag != payload);

    if (base == callFrameRegister) {
        RELEASE_ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

inline void JIT::emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2)
{
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}

inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(TrustedImmPtr(&inConstantPool), value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(int index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(int index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(TrustedImm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(int index, RegisterID payload, bool indexIsBool)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsBool)
        store32(TrustedImm32(JSValue::BooleanTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(int index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(int index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(int dst)
{
    emitStore(dst, jsUndefined());
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, TrustedImm32(JSValue::CellTag)));
    }
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(int op1, int op2, int& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

#else // USE(JSVALUE32_64)

// get arg puts an arg from the SF register array into a h/w register
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        if (!value.isNumber())
            move(TrustedImm64(JSValue::encode(value)), dst);
        else
            move(Imm64(JSValue::encode(value)), dst);
        return;
    }

    load64(Address(callFrameRegister, src * sizeof(Register)), dst);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegister(VirtualRegister src, RegisterID dst)
{
    emitGetVirtualRegister(src.offset(), dst);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    emitGetVirtualRegister(src1, dst1);
    emitGetVirtualRegister(src2, dst2);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(VirtualRegister src1, RegisterID dst1, VirtualRegister src2, RegisterID dst2)
{
    emitGetVirtualRegisters(src1.offset(), dst1, src2.offset(), dst2);
}

ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(int src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(int dst, RegisterID from)
{
    store64(from, Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(VirtualRegister dst, RegisterID from)
{
    emitPutVirtualRegister(dst.offset(), from);
}

ALWAYS_INLINE void JIT::emitInitRegister(int dst)
{
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
    return branchTest64(Zero, reg, tagMaskRegister);
}

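// In the JSVALUE64 encoding a cell pointer has none of the tag bits set, so testing the value
// against tagMaskRegister (the branchTest64(Zero, ...) above) distinguishes cells from
// immediates without touching memory.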
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    or64(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(TrustedImmPtr(&inConstantPool), value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        ASSERT(isOperandConstantImmediateInt(index));
        convertInt32ToDouble(Imm32(getConstantOperand(index).asInt32()), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
    return branch64(AboveOrEqual, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
    return branch64(Below, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    and64(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
    emitFastArithIntToImmNoCheck(src, dest);
}

ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    or32(TrustedImm32(static_cast<int32_t>(ValueFalse)), reg);
}

#endif // USE(JSVALUE32_64)

template <typename T>
JIT::Jump JIT::branchStructure(RelationalCondition condition, T leftHandSide, Structure* structure)
{
#if USE(JSVALUE64)
    return branch32(condition, leftHandSide, TrustedImm32(structure->id()));
#else
    return branchPtr(condition, leftHandSide, TrustedImmPtr(structure));
#endif
}

template <typename T>
MacroAssembler::Jump branchStructure(MacroAssembler& jit, MacroAssembler::RelationalCondition condition, T leftHandSide, Structure* structure)
{
#if USE(JSVALUE64)
    return jit.branch32(condition, leftHandSide, MacroAssembler::TrustedImm32(structure->id()));
#else
    return jit.branchPtr(condition, leftHandSide, MacroAssembler::TrustedImmPtr(structure));
#endif
}

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JITInlines_h