/*
 * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef DFGSpeculativeJIT_h
#define DFGSpeculativeJIT_h

#include "DFGAbstractInterpreter.h"
#include "DFGGenerationInfo.h"
#include "DFGInPlaceAbstractState.h"
#include "DFGJITCompiler.h"
#include "DFGOSRExit.h"
#include "DFGOSRExitJumpPlaceholder.h"
#include "DFGSilentRegisterSavePlan.h"
#include "DFGValueSource.h"
#include "JITOperations.h"
#include "MarkedAllocator.h"
#include "ValueRecovery.h"
#include "VirtualRegister.h"
namespace JSC { namespace DFG {

class SlowPathGenerator;
class SpeculateInt32Operand;
class SpeculateStrictInt32Operand;
class SpeculateDoubleOperand;
class SpeculateCellOperand;
class SpeculateBooleanOperand;

enum GeneratedOperandType { GeneratedOperandTypeUnknown, GeneratedOperandInteger, GeneratedOperandJSValue };

inline GPRReg extractResult(GPRReg result) { return result; }
#if USE(JSVALUE64)
inline GPRReg extractResult(JSValueRegs result) { return result.gpr(); }
#else
inline JSValueRegs extractResult(JSValueRegs result) { return result; }
#endif
inline NoResultTag extractResult(NoResultTag) { return NoResult; }
// === SpeculativeJIT ===
//
// The SpeculativeJIT is used to generate a fast, but potentially
// incomplete code path for the dataflow. When code generating
// we may make assumptions about operand types, dynamically check,
// and bail-out to an alternate code path if these checks fail.
// Importantly, the speculative code path cannot be reentered once
// a speculative check has failed. This allows the SpeculativeJIT
// to propagate type information (including information that has
// only speculatively been asserted) through the dataflow.
class SpeculativeJIT {
    WTF_MAKE_FAST_ALLOCATED;

    friend struct OSRExit;
private:
    typedef JITCompiler::TrustedImm32 TrustedImm32;
    typedef JITCompiler::Imm32 Imm32;
    typedef JITCompiler::TrustedImmPtr TrustedImmPtr;
    typedef JITCompiler::ImmPtr ImmPtr;
    typedef JITCompiler::TrustedImm64 TrustedImm64;
    typedef JITCompiler::Imm64 Imm64;
    // These constants are used to set priorities for spill order for
    // the register allocator.
#if USE(JSVALUE64)
    enum SpillOrder {
        SpillOrderConstant = 1, // no spill, and cheap fill
        SpillOrderSpilled  = 2, // no spill
        SpillOrderJS       = 4, // needs spill
        SpillOrderCell     = 4, // needs spill
        SpillOrderStorage  = 4, // needs spill
        SpillOrderInteger  = 5, // needs spill and box
        SpillOrderBoolean  = 5, // needs spill and box
        SpillOrderDouble   = 6, // needs spill and convert
    };
#elif USE(JSVALUE32_64)
    enum SpillOrder {
        SpillOrderConstant = 1, // no spill, and cheap fill
        SpillOrderSpilled  = 2, // no spill
        SpillOrderJS       = 4, // needs spill
        SpillOrderStorage  = 4, // needs spill
        SpillOrderDouble   = 4, // needs spill
        SpillOrderInteger  = 5, // needs spill and box
        SpillOrderCell     = 5, // needs spill and box
        SpillOrderBoolean  = 5, // needs spill and box
    };
#endif

    enum UseChildrenMode { CallUseChildren, UseChildrenCalledExplicitly };
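
    // Note on SpillOrder (descriptive, inferred from the annotations above): when the
    // register allocator has to evict a value, lower spill-order values are the cheaper
    // victims - constants and already-spilled values cost nothing to evict and are cheap
    // to refill, while values that still need a store (and possibly boxing or a double
    // conversion) are the most expensive to evict.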
public:
    SpeculativeJIT(JITCompiler&);

    void createOSREntries();
    void linkOSREntries(LinkBuffer&);

    BasicBlock* nextBlock()
    {
        for (BlockIndex resultIndex = m_block->index + 1; ; resultIndex++) {
            if (resultIndex >= m_jit.graph().numBlocks())
                return 0;
            if (BasicBlock* result = m_jit.graph().block(resultIndex))
                return result;
        }
    }

#if USE(JSVALUE64)
    GPRReg fillJSValue(Edge);
#elif USE(JSVALUE32_64)
    bool fillJSValue(Edge, GPRReg&, GPRReg&, FPRReg&);
#endif
    GPRReg fillStorage(Edge);
    // lock and unlock GPR & FPR registers.
    void lock(GPRReg reg)
    {
        m_gprs.lock(reg);
    }
    void lock(FPRReg reg)
    {
        m_fprs.lock(reg);
    }
    void unlock(GPRReg reg)
    {
        m_gprs.unlock(reg);
    }
    void unlock(FPRReg reg)
    {
        m_fprs.unlock(reg);
    }

    // Used to check whether a child node is on its last use,
    // and its machine registers may be reused.
    bool canReuse(Node* node)
    {
        return generationInfo(node).canReuse();
    }
    bool canReuse(Edge nodeUse)
    {
        return canReuse(nodeUse.node());
    }
    GPRReg reuse(GPRReg reg)
    {
        m_gprs.lock(reg);
        return reg;
    }
    FPRReg reuse(FPRReg reg)
    {
        m_fprs.lock(reg);
        return reg;
    }
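
    // Descriptive note: when canReuse() reports that an operand is on its last use, a
    // parent node may take over that operand's machine register for its own result via
    // reuse() instead of allocating a fresh one; reuse() simply locks the register again
    // and hands it back.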
    // Allocate a gpr/fpr.
    GPRReg allocate()
    {
#if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION)
        m_jit.addRegisterAllocationAtOffset(m_jit.debugOffset());
#endif
        VirtualRegister spillMe;
        GPRReg gpr = m_gprs.allocate(spillMe);
        if (spillMe.isValid()) {
#if USE(JSVALUE32_64)
            GenerationInfo& info = generationInfoFromVirtualRegister(spillMe);
            if ((info.registerFormat() & DataFormatJS))
                m_gprs.release(info.tagGPR() == gpr ? info.payloadGPR() : info.tagGPR());
#endif
            spill(spillMe);
        }
        return gpr;
    }
    GPRReg allocate(GPRReg specific)
    {
#if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION)
        m_jit.addRegisterAllocationAtOffset(m_jit.debugOffset());
#endif
        VirtualRegister spillMe = m_gprs.allocateSpecific(specific);
        if (spillMe.isValid()) {
#if USE(JSVALUE32_64)
            GenerationInfo& info = generationInfoFromVirtualRegister(spillMe);
            RELEASE_ASSERT(info.registerFormat() != DataFormatJSDouble);
            if ((info.registerFormat() & DataFormatJS))
                m_gprs.release(info.tagGPR() == specific ? info.payloadGPR() : info.tagGPR());
#endif
            spill(spillMe);
        }
        return specific;
    }
    GPRReg tryAllocate()
    {
        return m_gprs.tryAllocate();
    }
    FPRReg fprAllocate()
    {
#if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION)
        m_jit.addRegisterAllocationAtOffset(m_jit.debugOffset());
#endif
        VirtualRegister spillMe;
        FPRReg fpr = m_fprs.allocate(spillMe);
        if (spillMe.isValid())
            spill(spillMe);
        return fpr;
    }
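
    // Descriptive note: allocate() may have to evict a live value. The register bank
    // reports the victim's VirtualRegister through spillMe, and that value is written
    // back to the stack before the register is handed out. On JSVALUE32_64 a JSValue
    // can own two GPRs (tag + payload), so the half we are not taking is released
    // explicitly first.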
    // Check whether a VirtualRegister is currently in a machine register.
    // We use this when filling operands to fill those that are already in
    // machine registers first (by locking VirtualRegisters that are already
    // in machine registers before filling those that are not, we attempt to
    // avoid spilling values we will need immediately).
    bool isFilled(Node* node)
    {
        return generationInfo(node).registerFormat() != DataFormatNone;
    }
    bool isFilledDouble(Node* node)
    {
        return generationInfo(node).registerFormat() == DataFormatDouble;
    }

    // Called on an operand once it has been consumed by a parent node.
    void use(Node* node)
    {
        if (!node->hasResult())
            return;
        GenerationInfo& info = generationInfo(node);

        // use() returns true when the value becomes dead, and any
        // associated resources may be freed.
        if (!info.use(*m_stream))
            return;

        // Release the associated machine registers.
        DataFormat registerFormat = info.registerFormat();
#if USE(JSVALUE64)
        if (registerFormat == DataFormatDouble)
            m_fprs.release(info.fpr());
        else if (registerFormat != DataFormatNone)
            m_gprs.release(info.gpr());
#elif USE(JSVALUE32_64)
        if (registerFormat == DataFormatDouble)
            m_fprs.release(info.fpr());
        else if (registerFormat & DataFormatJS) {
            m_gprs.release(info.tagGPR());
            m_gprs.release(info.payloadGPR());
        } else if (registerFormat != DataFormatNone)
            m_gprs.release(info.gpr());
#endif
    }
    void use(Edge nodeUse)
    {
        use(nodeUse.node());
    }

    RegisterSet usedRegisters();

    bool masqueradesAsUndefinedWatchpointIsStillValid(const CodeOrigin& codeOrigin)
    {
        return m_jit.graph().masqueradesAsUndefinedWatchpointIsStillValid(codeOrigin);
    }
    bool masqueradesAsUndefinedWatchpointIsStillValid()
    {
        return masqueradesAsUndefinedWatchpointIsStillValid(m_currentNode->origin.semantic);
    }

    void storeToWriteBarrierBuffer(GPRReg cell, GPRReg scratch1, GPRReg scratch2);
    void storeToWriteBarrierBuffer(JSCell*, GPRReg scratch1, GPRReg scratch2);

    void writeBarrier(GPRReg owner, GPRReg scratch1, GPRReg scratch2);
    void writeBarrier(GPRReg owner, JSCell* value, GPRReg scratch1, GPRReg scratch2);

    void writeBarrier(GPRReg owner, GPRReg value, Edge valueUse, GPRReg scratch1, GPRReg scratch2);
    void writeBarrier(JSCell* owner, GPRReg value, Edge valueUse, GPRReg scratch1, GPRReg scratch2);

    void compileStoreBarrier(Node*);

    static GPRReg selectScratchGPR(GPRReg preserve1 = InvalidGPRReg, GPRReg preserve2 = InvalidGPRReg, GPRReg preserve3 = InvalidGPRReg, GPRReg preserve4 = InvalidGPRReg)
    {
        return AssemblyHelpers::selectScratchGPR(preserve1, preserve2, preserve3, preserve4);
    }
    // Called by the speculative operand types, below, to fill operands into
    // machine registers, implicitly generating speculation checks as needed.
    GPRReg fillSpeculateInt32(Edge, DataFormat& returnFormat);
    GPRReg fillSpeculateInt32Strict(Edge);
    GPRReg fillSpeculateInt52(Edge, DataFormat desiredFormat);
    FPRReg fillSpeculateDouble(Edge);
    GPRReg fillSpeculateCell(Edge);
    GPRReg fillSpeculateBoolean(Edge);
    GeneratedOperandType checkGeneratedTypeForToInt32(Node*);

    void addSlowPathGenerator(PassOwnPtr<SlowPathGenerator>);
    void runSlowPathGenerators();

    void noticeOSRBirth(Node*);
    void bail(AbortReason);
    void compileCurrentBlock();

    void checkArgumentTypes();

    void clearGenerationInfo();
    // These methods are used when generating 'unexpected'
    // calls out from JIT code to C++ helper routines -
    // they spill all live values to the appropriate
    // slots in the JSStack without changing any state
    // in the GenerationInfo.
    SilentRegisterSavePlan silentSavePlanForGPR(VirtualRegister spillMe, GPRReg source);
    SilentRegisterSavePlan silentSavePlanForFPR(VirtualRegister spillMe, FPRReg source);
    void silentSpill(const SilentRegisterSavePlan&);
    void silentFill(const SilentRegisterSavePlan&, GPRReg canTrample);
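
    // Illustrative pattern (a sketch, not a verbatim excerpt from this file): around an
    // unexpected C++ call, all live registers are silently spilled, the call is made, and
    // the registers are silently refilled, leaving the GenerationInfo untouched:
    //     silentSpillAllRegisters(resultGPR);
    //     callOperation(operation, resultGPR, argGPR);
    //     silentFillAllRegisters(resultGPR);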
    template<typename CollectionType>
    void silentSpillAllRegistersImpl(bool doSpill, CollectionType& plans, GPRReg exclude, GPRReg exclude2 = InvalidGPRReg, FPRReg fprExclude = InvalidFPRReg)
    {
        ASSERT(plans.isEmpty());
        for (gpr_iterator iter = m_gprs.begin(); iter != m_gprs.end(); ++iter) {
            GPRReg gpr = iter.regID();
            if (iter.name().isValid() && gpr != exclude && gpr != exclude2) {
                SilentRegisterSavePlan plan = silentSavePlanForGPR(iter.name(), gpr);
                if (doSpill)
                    silentSpill(plan);
                plans.append(plan);
            }
        }
        for (fpr_iterator iter = m_fprs.begin(); iter != m_fprs.end(); ++iter) {
            if (iter.name().isValid() && iter.regID() != fprExclude) {
                SilentRegisterSavePlan plan = silentSavePlanForFPR(iter.name(), iter.regID());
                if (doSpill)
                    silentSpill(plan);
                plans.append(plan);
            }
        }
    }
    template<typename CollectionType>
    void silentSpillAllRegistersImpl(bool doSpill, CollectionType& plans, NoResultTag)
    {
        silentSpillAllRegistersImpl(doSpill, plans, InvalidGPRReg, InvalidGPRReg, InvalidFPRReg);
    }
    template<typename CollectionType>
    void silentSpillAllRegistersImpl(bool doSpill, CollectionType& plans, FPRReg exclude)
    {
        silentSpillAllRegistersImpl(doSpill, plans, InvalidGPRReg, InvalidGPRReg, exclude);
    }
#if USE(JSVALUE32_64)
    template<typename CollectionType>
    void silentSpillAllRegistersImpl(bool doSpill, CollectionType& plans, JSValueRegs exclude)
    {
        silentSpillAllRegistersImpl(doSpill, plans, exclude.tagGPR(), exclude.payloadGPR());
    }
#endif

    void silentSpillAllRegisters(GPRReg exclude, GPRReg exclude2 = InvalidGPRReg, FPRReg fprExclude = InvalidFPRReg)
    {
        silentSpillAllRegistersImpl(true, m_plans, exclude, exclude2, fprExclude);
    }
    void silentSpillAllRegisters(FPRReg exclude)
    {
        silentSpillAllRegisters(InvalidGPRReg, InvalidGPRReg, exclude);
    }

    static GPRReg pickCanTrample(GPRReg exclude)
    {
        GPRReg result = GPRInfo::regT0;
        if (result == exclude)
            result = GPRInfo::regT1;
        return result;
    }
    static GPRReg pickCanTrample(FPRReg)
    {
        return GPRInfo::regT0;
    }
    static GPRReg pickCanTrample(NoResultTag)
    {
        return GPRInfo::regT0;
    }

#if USE(JSVALUE32_64)
    static GPRReg pickCanTrample(JSValueRegs exclude)
    {
        GPRReg result = GPRInfo::regT0;
        if (result == exclude.tagGPR()) {
            result = GPRInfo::regT1;
            if (result == exclude.payloadGPR())
                result = GPRInfo::regT2;
        } else if (result == exclude.payloadGPR()) {
            result = GPRInfo::regT1;
            if (result == exclude.tagGPR())
                result = GPRInfo::regT2;
        }
        return result;
    }
#endif

    template<typename RegisterType>
    void silentFillAllRegisters(RegisterType exclude)
    {
        GPRReg canTrample = pickCanTrample(exclude);

        while (!m_plans.isEmpty()) {
            SilentRegisterSavePlan& plan = m_plans.last();
            silentFill(plan, canTrample);
            m_plans.removeLast();
        }
    }
    // These methods convert between doubles, and doubles boxed and JSValues.
#if USE(JSVALUE64)
    GPRReg boxDouble(FPRReg fpr, GPRReg gpr)
    {
        return m_jit.boxDouble(fpr, gpr);
    }
    FPRReg unboxDouble(GPRReg gpr, FPRReg fpr)
    {
        return m_jit.unboxDouble(gpr, fpr);
    }
    GPRReg boxDouble(FPRReg fpr)
    {
        return boxDouble(fpr, allocate());
    }

    void boxInt52(GPRReg sourceGPR, GPRReg targetGPR, DataFormat);
#elif USE(JSVALUE32_64)
    void boxDouble(FPRReg fpr, GPRReg tagGPR, GPRReg payloadGPR)
    {
        m_jit.boxDouble(fpr, tagGPR, payloadGPR);
    }
    void unboxDouble(GPRReg tagGPR, GPRReg payloadGPR, FPRReg fpr, FPRReg scratchFPR)
    {
        m_jit.unboxDouble(tagGPR, payloadGPR, fpr, scratchFPR);
    }

    void boxDouble(FPRReg fpr, JSValueRegs regs)
    {
        m_jit.boxDouble(fpr, regs);
    }
#endif
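
    // Descriptive note: on JSVALUE64, boxing a double produces a single GPR holding the
    // encoded JSValue, so the 64-bit variants above return a register. On JSVALUE32_64
    // the boxed form is simply the tag/payload register pair, which is why the 32-bit
    // variants write into the supplied registers and return nothing.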
    // Spill a VirtualRegister to the JSStack.
    void spill(VirtualRegister spillMe)
    {
        GenerationInfo& info = generationInfoFromVirtualRegister(spillMe);

#if USE(JSVALUE32_64)
        if (info.registerFormat() == DataFormatNone) // it has been spilled. JS values which have two GPRs can reach here
            return;
#endif
        // Check the GenerationInfo to see if this value needs writing
        // to the JSStack - if not, mark it as spilled & return.
        if (!info.needsSpill()) {
            info.setSpilled(*m_stream, spillMe);
            return;
        }

        DataFormat spillFormat = info.registerFormat();
        switch (spillFormat) {
        case DataFormatStorage: {
            // This is special, since it's not a JS value - as in it's not visible to JS.
            m_jit.storePtr(info.gpr(), JITCompiler::addressFor(spillMe));
            info.spill(*m_stream, spillMe, DataFormatStorage);
            return;
        }

        case DataFormatInt32: {
            m_jit.store32(info.gpr(), JITCompiler::payloadFor(spillMe));
            info.spill(*m_stream, spillMe, DataFormatInt32);
            return;
        }

#if USE(JSVALUE64)
        case DataFormatDouble: {
            m_jit.storeDouble(info.fpr(), JITCompiler::addressFor(spillMe));
            info.spill(*m_stream, spillMe, DataFormatDouble);
            return;
        }

        case DataFormatInt52:
        case DataFormatStrictInt52: {
            m_jit.store64(info.gpr(), JITCompiler::addressFor(spillMe));
            info.spill(*m_stream, spillMe, spillFormat);
            return;
        }

        default:
            // The following code handles JSValues, int32s, and cells.
            RELEASE_ASSERT(spillFormat == DataFormatCell || spillFormat & DataFormatJS);

            GPRReg reg = info.gpr();
            // We need to box int32 and cell values ...
            // but on JSVALUE64 boxing a cell is a no-op!
            if (spillFormat == DataFormatInt32)
                m_jit.or64(GPRInfo::tagTypeNumberRegister, reg);

            // Spill the value, and record it as spilled in its boxed form.
            m_jit.store64(reg, JITCompiler::addressFor(spillMe));
            info.spill(*m_stream, spillMe, (DataFormat)(spillFormat | DataFormatJS));
            return;
#elif USE(JSVALUE32_64)
        case DataFormatCell:
        case DataFormatBoolean: {
            m_jit.store32(info.gpr(), JITCompiler::payloadFor(spillMe));
            info.spill(*m_stream, spillMe, spillFormat);
            return;
        }

        case DataFormatDouble: {
            // On JSVALUE32_64 boxing a double is a no-op.
            m_jit.storeDouble(info.fpr(), JITCompiler::addressFor(spillMe));
            info.spill(*m_stream, spillMe, DataFormatDouble);
            return;
        }

        default:
            // The following code handles JSValues.
            RELEASE_ASSERT(spillFormat & DataFormatJS);
            m_jit.store32(info.tagGPR(), JITCompiler::tagFor(spillMe));
            m_jit.store32(info.payloadGPR(), JITCompiler::payloadFor(spillMe));
            info.spill(*m_stream, spillMe, spillFormat);
            return;
#endif
        }
    }
    bool isKnownInteger(Node* node) { return m_state.forNode(node).isType(SpecInt32); }
    bool isKnownCell(Node* node) { return m_state.forNode(node).isType(SpecCell); }

    bool isKnownNotInteger(Node* node) { return !(m_state.forNode(node).m_type & SpecInt32); }
    bool isKnownNotNumber(Node* node) { return !(m_state.forNode(node).m_type & SpecFullNumber); }
    bool isKnownNotCell(Node* node) { return !(m_state.forNode(node).m_type & SpecCell); }
    // Checks/accessors for constant values.
    bool isConstant(Node* node) { return m_jit.graph().isConstant(node); }
    bool isJSConstant(Node* node) { return m_jit.graph().isJSConstant(node); }
    bool isInt32Constant(Node* node) { return m_jit.graph().isInt32Constant(node); }
    bool isDoubleConstant(Node* node) { return m_jit.graph().isDoubleConstant(node); }
    bool isNumberConstant(Node* node) { return m_jit.graph().isNumberConstant(node); }
    bool isBooleanConstant(Node* node) { return m_jit.graph().isBooleanConstant(node); }
    bool isFunctionConstant(Node* node) { return m_jit.graph().isFunctionConstant(node); }
    int32_t valueOfInt32Constant(Node* node) { return m_jit.graph().valueOfInt32Constant(node); }
    double valueOfNumberConstant(Node* node) { return m_jit.graph().valueOfNumberConstant(node); }
#if USE(JSVALUE32_64)
    void* addressOfDoubleConstant(Node* node) { return m_jit.addressOfDoubleConstant(node); }
#endif
    JSValue valueOfJSConstant(Node* node) { return m_jit.graph().valueOfJSConstant(node); }
    bool valueOfBooleanConstant(Node* node) { return m_jit.graph().valueOfBooleanConstant(node); }
    JSFunction* valueOfFunctionConstant(Node* node) { return m_jit.graph().valueOfFunctionConstant(node); }
    bool isNullConstant(Node* node)
    {
        if (!isConstant(node))
            return false;
        return valueOfJSConstant(node).isNull();
    }

    StringImpl* identifierUID(unsigned index)
    {
        return m_jit.graph().identifiers()[index];
    }
    // Spill all VirtualRegisters back to the JSStack.
    void flushRegisters()
    {
        for (gpr_iterator iter = m_gprs.begin(); iter != m_gprs.end(); ++iter) {
            if (iter.name().isValid()) {
                spill(iter.name());
                iter.release();
            }
        }
        for (fpr_iterator iter = m_fprs.begin(); iter != m_fprs.end(); ++iter) {
            if (iter.name().isValid()) {
                spill(iter.name());
                iter.release();
            }
        }
    }

    // Used to ASSERT flushRegisters() has been called prior to
    // calling out from JIT code to a C helper function.
    bool isFlushed()
    {
        for (gpr_iterator iter = m_gprs.begin(); iter != m_gprs.end(); ++iter) {
            if (iter.name().isValid())
                return false;
        }
        for (fpr_iterator iter = m_fprs.begin(); iter != m_fprs.end(); ++iter) {
            if (iter.name().isValid())
                return false;
        }
        return true;
    }
#if USE(JSVALUE64)
    MacroAssembler::Imm64 valueOfJSConstantAsImm64(Node* node)
    {
        return MacroAssembler::Imm64(JSValue::encode(valueOfJSConstant(node)));
    }
#endif
    // Helper functions to enable code sharing in implementations of bit/shift ops.
    void bitOp(NodeType op, int32_t imm, GPRReg op1, GPRReg result)
    {
        switch (op) {
        case BitAnd:
            m_jit.and32(Imm32(imm), op1, result);
            break;
        case BitOr:
            m_jit.or32(Imm32(imm), op1, result);
            break;
        case BitXor:
            m_jit.xor32(Imm32(imm), op1, result);
            break;
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }
    void bitOp(NodeType op, GPRReg op1, GPRReg op2, GPRReg result)
    {
        switch (op) {
        case BitAnd:
            m_jit.and32(op1, op2, result);
            break;
        case BitOr:
            m_jit.or32(op1, op2, result);
            break;
        case BitXor:
            m_jit.xor32(op1, op2, result);
            break;
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }
    void shiftOp(NodeType op, GPRReg op1, int32_t shiftAmount, GPRReg result)
    {
        switch (op) {
        case BitRShift:
            m_jit.rshift32(op1, Imm32(shiftAmount), result);
            break;
        case BitLShift:
            m_jit.lshift32(op1, Imm32(shiftAmount), result);
            break;
        case BitURShift:
            m_jit.urshift32(op1, Imm32(shiftAmount), result);
            break;
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }
    void shiftOp(NodeType op, GPRReg op1, GPRReg shiftAmount, GPRReg result)
    {
        switch (op) {
        case BitRShift:
            m_jit.rshift32(op1, shiftAmount, result);
            break;
        case BitLShift:
            m_jit.lshift32(op1, shiftAmount, result);
            break;
        case BitURShift:
            m_jit.urshift32(op1, shiftAmount, result);
            break;
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }
    // Returns the index of the branch node if peephole is okay, UINT_MAX otherwise.
    unsigned detectPeepHoleBranch()
    {
        // Check that no intervening nodes will be generated.
        for (unsigned index = m_indexInBlock + 1; index < m_block->size() - 1; ++index) {
            Node* node = m_block->at(index);
            if (!node->shouldGenerate())
                continue;
            // Check if it's a Phantom that can be safely ignored.
            if (node->op() == Phantom && !node->child1())
                continue;
            return UINT_MAX;
        }

        // Check if the lastNode is a branch on this node.
        Node* lastNode = m_block->last();
        return lastNode->op() == Branch && lastNode->child1() == m_currentNode ? m_block->size() - 1 : UINT_MAX;
    }
    void compileMovHint(Node*);
    void compileMovHintAndCheck(Node*);

#if USE(JSVALUE64)
    void cachedGetById(CodeOrigin, GPRReg baseGPR, GPRReg resultGPR, unsigned identifierNumber, JITCompiler::Jump slowPathTarget = JITCompiler::Jump(), SpillRegistersMode = NeedToSpill);
    void cachedPutById(CodeOrigin, GPRReg base, GPRReg value, GPRReg scratchGPR, unsigned identifierNumber, PutKind, JITCompiler::Jump slowPathTarget = JITCompiler::Jump(), SpillRegistersMode = NeedToSpill);
#elif USE(JSVALUE32_64)
    void cachedGetById(CodeOrigin, GPRReg baseTagGPROrNone, GPRReg basePayloadGPR, GPRReg resultTagGPR, GPRReg resultPayloadGPR, unsigned identifierNumber, JITCompiler::Jump slowPathTarget = JITCompiler::Jump(), SpillRegistersMode = NeedToSpill);
    void cachedPutById(CodeOrigin, GPRReg basePayloadGPR, GPRReg valueTagGPR, GPRReg valuePayloadGPR, GPRReg scratchGPR, unsigned identifierNumber, PutKind, JITCompiler::Jump slowPathTarget = JITCompiler::Jump(), SpillRegistersMode = NeedToSpill);
#endif

    void compileIn(Node*);

    void compileBaseValueStoreBarrier(Edge& baseEdge, Edge& valueEdge);

    void nonSpeculativeNonPeepholeCompareNull(Edge operand, bool invert = false);
    void nonSpeculativePeepholeBranchNull(Edge operand, Node* branchNode, bool invert = false);
    bool nonSpeculativeCompareNull(Node*, Edge operand, bool invert = false);

    void nonSpeculativePeepholeBranch(Node*, Node* branchNode, MacroAssembler::RelationalCondition, S_JITOperation_EJJ helperFunction);
    void nonSpeculativeNonPeepholeCompare(Node*, MacroAssembler::RelationalCondition, S_JITOperation_EJJ helperFunction);
    bool nonSpeculativeCompare(Node*, MacroAssembler::RelationalCondition, S_JITOperation_EJJ helperFunction);

    void nonSpeculativePeepholeStrictEq(Node*, Node* branchNode, bool invert = false);
    void nonSpeculativeNonPeepholeStrictEq(Node*, bool invert = false);
    bool nonSpeculativeStrictEq(Node*, bool invert = false);

    void compileInstanceOfForObject(Node*, GPRReg valueReg, GPRReg prototypeReg, GPRReg scratchAndResultReg, GPRReg scratch2Reg);
    void compileInstanceOf(Node*);
    ptrdiff_t calleeFrameOffset(int numArgs)
    {
        return virtualRegisterForLocal(m_jit.graph().m_nextMachineLocal - 1 + JSStack::CallFrameHeaderSize + numArgs).offset() * sizeof(Register);
    }

    // Access to our fixed callee CallFrame.
    MacroAssembler::Address calleeFrameSlot(int slot)
    {
        ASSERT(slot >= JSStack::CallerFrameAndPCSize);
        return MacroAssembler::Address(MacroAssembler::stackPointerRegister, sizeof(Register) * (slot - JSStack::CallerFrameAndPCSize));
    }

    // Access to our fixed callee CallFrame.
    MacroAssembler::Address calleeArgumentSlot(int argument)
    {
        return calleeFrameSlot(virtualRegisterForArgument(argument).offset());
    }

    MacroAssembler::Address calleeFrameTagSlot(int slot)
    {
        return calleeFrameSlot(slot).withOffset(OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
    }

    MacroAssembler::Address calleeFramePayloadSlot(int slot)
    {
        return calleeFrameSlot(slot).withOffset(OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }

    MacroAssembler::Address calleeArgumentTagSlot(int argument)
    {
        return calleeArgumentSlot(argument).withOffset(OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
    }

    MacroAssembler::Address calleeArgumentPayloadSlot(int argument)
    {
        return calleeArgumentSlot(argument).withOffset(OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }

    MacroAssembler::Address calleeFrameCallerFrame()
    {
        return calleeFrameSlot(0).withOffset(CallFrame::callerFrameOffset());
    }
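
    // Illustrative use (a sketch, not a verbatim excerpt from this file): when setting up
    // an outgoing call, argument values are stored through these addresses, which are
    // computed relative to the stack pointer where the callee frame will live, e.g.
    //     m_jit.storePtr(valueGPR, calleeArgumentPayloadSlot(0));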
    void emitCall(Node*);

    int32_t framePointerOffsetToGetActivationRegisters()
    {
        return m_jit.codeBlock()->framePointerOffsetToGetActivationRegisters(
            m_jit.graph().m_machineCaptureStart);
    }
    // Called once a node has completed code generation but prior to setting
    // its result, to free up its children. (This must happen prior to setting
    // the node's result, since the node may have the same VirtualRegister as
    // a child, and as such will use the same GenerationInfo.)
    void useChildren(Node*);

    // These methods are called to initialize the GenerationInfo
    // to describe the result of an operation.
    void int32Result(GPRReg reg, Node* node, DataFormat format = DataFormatInt32, UseChildrenMode mode = CallUseChildren)
    {
        if (mode == CallUseChildren)
            useChildren(node);

        VirtualRegister virtualRegister = node->virtualRegister();
        GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister);

        if (format == DataFormatInt32) {
            m_jit.jitAssertIsInt32(reg);
            m_gprs.retain(reg, virtualRegister, SpillOrderInteger);
            info.initInt32(node, node->refCount(), reg);
        } else {
#if USE(JSVALUE64)
            RELEASE_ASSERT(format == DataFormatJSInt32);
            m_jit.jitAssertIsJSInt32(reg);
            m_gprs.retain(reg, virtualRegister, SpillOrderJS);
            info.initJSValue(node, node->refCount(), reg, format);
#elif USE(JSVALUE32_64)
            RELEASE_ASSERT_NOT_REACHED();
#endif
        }
    }
    void int32Result(GPRReg reg, Node* node, UseChildrenMode mode)
    {
        int32Result(reg, node, DataFormatInt32, mode);
    }
    void int52Result(GPRReg reg, Node* node, DataFormat format, UseChildrenMode mode = CallUseChildren)
    {
        if (mode == CallUseChildren)
            useChildren(node);

        VirtualRegister virtualRegister = node->virtualRegister();
        GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister);

        m_gprs.retain(reg, virtualRegister, SpillOrderJS);
        info.initInt52(node, node->refCount(), reg, format);
    }
    void int52Result(GPRReg reg, Node* node, UseChildrenMode mode = CallUseChildren)
    {
        int52Result(reg, node, DataFormatInt52, mode);
    }
    void strictInt52Result(GPRReg reg, Node* node, UseChildrenMode mode = CallUseChildren)
    {
        int52Result(reg, node, DataFormatStrictInt52, mode);
    }
    void noResult(Node* node, UseChildrenMode mode = CallUseChildren)
    {
        if (mode == UseChildrenCalledExplicitly)
            return;
        useChildren(node);
    }
    void cellResult(GPRReg reg, Node* node, UseChildrenMode mode = CallUseChildren)
    {
        if (mode == CallUseChildren)
            useChildren(node);

        VirtualRegister virtualRegister = node->virtualRegister();
        m_gprs.retain(reg, virtualRegister, SpillOrderCell);
        GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister);
        info.initCell(node, node->refCount(), reg);
    }
    void blessedBooleanResult(GPRReg reg, Node* node, UseChildrenMode mode = CallUseChildren)
    {
#if USE(JSVALUE64)
        jsValueResult(reg, node, DataFormatJSBoolean, mode);
#else
        if (mode == CallUseChildren)
            useChildren(node);

        VirtualRegister virtualRegister = node->virtualRegister();
        m_gprs.retain(reg, virtualRegister, SpillOrderBoolean);
        GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister);
        info.initBoolean(node, node->refCount(), reg);
#endif
    }
    void unblessedBooleanResult(GPRReg reg, Node* node, UseChildrenMode mode = CallUseChildren)
    {
#if USE(JSVALUE64)
        m_jit.or32(TrustedImm32(ValueFalse), reg);
#endif
        blessedBooleanResult(reg, node, mode);
    }
#if USE(JSVALUE64)
    void jsValueResult(GPRReg reg, Node* node, DataFormat format = DataFormatJS, UseChildrenMode mode = CallUseChildren)
    {
        if (format == DataFormatJSInt32)
            m_jit.jitAssertIsJSInt32(reg);

        if (mode == CallUseChildren)
            useChildren(node);

        VirtualRegister virtualRegister = node->virtualRegister();
        m_gprs.retain(reg, virtualRegister, SpillOrderJS);
        GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister);
        info.initJSValue(node, node->refCount(), reg, format);
    }
    void jsValueResult(GPRReg reg, Node* node, UseChildrenMode mode)
    {
        jsValueResult(reg, node, DataFormatJS, mode);
    }
#elif USE(JSVALUE32_64)
    void booleanResult(GPRReg reg, Node* node, UseChildrenMode mode = CallUseChildren)
    {
        if (mode == CallUseChildren)
            useChildren(node);

        VirtualRegister virtualRegister = node->virtualRegister();
        m_gprs.retain(reg, virtualRegister, SpillOrderBoolean);
        GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister);
        info.initBoolean(node, node->refCount(), reg);
    }
    void jsValueResult(GPRReg tag, GPRReg payload, Node* node, DataFormat format = DataFormatJS, UseChildrenMode mode = CallUseChildren)
    {
        if (mode == CallUseChildren)
            useChildren(node);

        VirtualRegister virtualRegister = node->virtualRegister();
        m_gprs.retain(tag, virtualRegister, SpillOrderJS);
        m_gprs.retain(payload, virtualRegister, SpillOrderJS);
        GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister);
        info.initJSValue(node, node->refCount(), tag, payload, format);
    }
    void jsValueResult(GPRReg tag, GPRReg payload, Node* node, UseChildrenMode mode)
    {
        jsValueResult(tag, payload, node, DataFormatJS, mode);
    }
#endif
    void jsValueResult(JSValueRegs regs, Node* node, DataFormat format = DataFormatJS, UseChildrenMode mode = CallUseChildren)
    {
#if USE(JSVALUE64)
        jsValueResult(regs.gpr(), node, format, mode);
#else
        jsValueResult(regs.tagGPR(), regs.payloadGPR(), node, format, mode);
#endif
    }
    void storageResult(GPRReg reg, Node* node, UseChildrenMode mode = CallUseChildren)
    {
        if (mode == CallUseChildren)
            useChildren(node);

        VirtualRegister virtualRegister = node->virtualRegister();
        m_gprs.retain(reg, virtualRegister, SpillOrderStorage);
        GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister);
        info.initStorage(node, node->refCount(), reg);
    }
    void doubleResult(FPRReg reg, Node* node, UseChildrenMode mode = CallUseChildren)
    {
        if (mode == CallUseChildren)
            useChildren(node);

        VirtualRegister virtualRegister = node->virtualRegister();
        m_fprs.retain(reg, virtualRegister, SpillOrderDouble);
        GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister);
        info.initDouble(node, node->refCount(), reg);
    }
    void initConstantInfo(Node* node)
    {
        ASSERT(isInt32Constant(node) || isNumberConstant(node) || isJSConstant(node));
        generationInfo(node).initConstant(node, node->refCount());
    }
    // These methods add calls to C++ helper functions.
    // These methods are broadly value representation specific (i.e.
    // deal with the fact that a JSValue may be passed in one or two
    // machine registers, and delegate the calling convention specific
    // decision as to how to fill the registers to setupArguments* methods).
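
    // Illustrative calling pattern (a sketch, not a verbatim excerpt from this file):
    // spill everything, make the call, then adopt the returned value as the node's
    // result, e.g. for a cell-producing operation:
    //     flushRegisters();
    //     callOperation(operation, resultGPR, arg1GPR);
    //     cellResult(resultGPR, node);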
    JITCompiler::Call callOperation(P_JITOperation_E operation, GPRReg result)
    {
        m_jit.setupArgumentsExecState();
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_EC operation, GPRReg result, GPRReg cell)
    {
        m_jit.setupArgumentsWithExecState(cell);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_EO operation, GPRReg result, GPRReg object)
    {
        m_jit.setupArgumentsWithExecState(object);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_EOS operation, GPRReg result, GPRReg object, size_t size)
    {
        m_jit.setupArgumentsWithExecState(object, TrustedImmPtr(size));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_EOZ operation, GPRReg result, GPRReg object, int32_t size)
    {
        m_jit.setupArgumentsWithExecState(object, TrustedImmPtr(size));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(C_JITOperation_EOZ operation, GPRReg result, GPRReg object, int32_t size)
    {
        m_jit.setupArgumentsWithExecState(object, TrustedImmPtr(static_cast<size_t>(size)));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_EPS operation, GPRReg result, GPRReg old, size_t size)
    {
        m_jit.setupArgumentsWithExecState(old, TrustedImmPtr(size));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_ES operation, GPRReg result, size_t size)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(size));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_ESJss operation, GPRReg result, size_t index, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(index), arg1);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_ESt operation, GPRReg result, Structure* structure)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_EStZ operation, GPRReg result, Structure* structure, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure), arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_EStZ operation, GPRReg result, Structure* structure, size_t arg2)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure), TrustedImm32(arg2));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_EStZ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_EStPS operation, GPRReg result, Structure* structure, void* pointer, size_t size)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure), TrustedImmPtr(pointer), TrustedImmPtr(size));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(P_JITOperation_EStSS operation, GPRReg result, Structure* structure, size_t index, size_t size)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure), TrustedImmPtr(index), TrustedImmPtr(size));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(C_JITOperation_E operation, GPRReg result)
    {
        m_jit.setupArgumentsExecState();
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(C_JITOperation_EC operation, GPRReg result, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(C_JITOperation_EC operation, GPRReg result, JSCell* cell)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(cell));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(C_JITOperation_ECC operation, GPRReg result, GPRReg arg1, JSCell* cell)
    {
        m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(cell));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(C_JITOperation_EIcf operation, GPRReg result, InlineCallFrame* inlineCallFrame)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(inlineCallFrame));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(C_JITOperation_ESt operation, GPRReg result, Structure* structure)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(C_JITOperation_EJssSt operation, GPRReg result, GPRReg arg1, Structure* structure)
    {
        m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(structure));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(C_JITOperation_EJssJss operation, GPRReg result, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(C_JITOperation_EJssJssJss operation, GPRReg result, GPRReg arg1, GPRReg arg2, GPRReg arg3)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2, arg3);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }

    JITCompiler::Call callOperation(S_JITOperation_ECC operation, GPRReg result, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }

    JITCompiler::Call callOperation(Jss_JITOperation_EZ operation, GPRReg result, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }

    JITCompiler::Call callOperation(V_JITOperation_EC operation, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheck(operation);
    }

    JITCompiler::Call callOperation(V_JITOperation_EC operation, JSCell* arg1)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(arg1));
        return appendCallWithExceptionCheck(operation);
    }

    JITCompiler::Call callOperation(V_JITOperation_ECIcf operation, GPRReg arg1, InlineCallFrame* inlineCallFrame)
    {
        m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(inlineCallFrame));
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_ECCIcf operation, GPRReg arg1, GPRReg arg2, InlineCallFrame* inlineCallFrame)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2, TrustedImmPtr(inlineCallFrame));
        return appendCallWithExceptionCheck(operation);
    }

    JITCompiler::Call callOperation(V_JITOperation_ECZ operation, GPRReg arg1, int arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, TrustedImm32(arg2));
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_ECC operation, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_ECC operation, GPRReg arg1, JSCell* arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_ECC operation, JSCell* arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2);
        return appendCallWithExceptionCheck(operation);
    }

    JITCompiler::Call callOperationWithCallFrameRollbackOnException(V_JITOperation_ECb operation, void* pointer)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(pointer));
        return appendCallWithCallFrameRollbackOnException(operation);
    }

    JITCompiler::Call callOperationWithCallFrameRollbackOnException(Z_JITOperation_E operation, GPRReg result)
    {
        m_jit.setupArgumentsExecState();
        return appendCallWithCallFrameRollbackOnExceptionSetResult(operation, result);
    }

    template<typename FunctionType, typename ArgumentType1>
    JITCompiler::Call callOperation(FunctionType operation, NoResultTag, ArgumentType1 arg1)
    {
        return callOperation(operation, arg1);
    }
    template<typename FunctionType, typename ArgumentType1, typename ArgumentType2>
    JITCompiler::Call callOperation(FunctionType operation, NoResultTag, ArgumentType1 arg1, ArgumentType2 arg2)
    {
        return callOperation(operation, arg1, arg2);
    }
    template<typename FunctionType, typename ArgumentType1, typename ArgumentType2, typename ArgumentType3>
    JITCompiler::Call callOperation(FunctionType operation, NoResultTag, ArgumentType1 arg1, ArgumentType2 arg2, ArgumentType3 arg3)
    {
        return callOperation(operation, arg1, arg2, arg3);
    }
    template<typename FunctionType, typename ArgumentType1, typename ArgumentType2, typename ArgumentType3, typename ArgumentType4>
    JITCompiler::Call callOperation(FunctionType operation, NoResultTag, ArgumentType1 arg1, ArgumentType2 arg2, ArgumentType3 arg3, ArgumentType4 arg4)
    {
        return callOperation(operation, arg1, arg2, arg3, arg4);
    }
    template<typename FunctionType, typename ArgumentType1, typename ArgumentType2, typename ArgumentType3, typename ArgumentType4, typename ArgumentType5>
    JITCompiler::Call callOperation(FunctionType operation, NoResultTag, ArgumentType1 arg1, ArgumentType2 arg2, ArgumentType3 arg3, ArgumentType4 arg4, ArgumentType5 arg5)
    {
        return callOperation(operation, arg1, arg2, arg3, arg4, arg5);
    }

    JITCompiler::Call callOperation(D_JITOperation_ZZ operation, FPRReg result, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArguments(arg1, arg2);
        return appendCallSetResult(operation, result);
    }
    JITCompiler::Call callOperation(D_JITOperation_D operation, FPRReg result, FPRReg arg1)
    {
        m_jit.setupArguments(arg1);
        return appendCallSetResult(operation, result);
    }
    JITCompiler::Call callOperation(D_JITOperation_DD operation, FPRReg result, FPRReg arg1, FPRReg arg2)
    {
        m_jit.setupArguments(arg1, arg2);
        return appendCallSetResult(operation, result);
    }
    JITCompiler::Call callOperation(I_JITOperation_EJss operation, GPRReg result, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(C_JITOperation_EZ operation, GPRReg result, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(C_JITOperation_EZ operation, GPRReg result, int32_t arg1)
    {
        m_jit.setupArgumentsWithExecState(TrustedImm32(arg1));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
#if USE(JSVALUE64)
    JITCompiler::Call callOperation(J_JITOperation_E operation, GPRReg result)
    {
        m_jit.setupArgumentsExecState();
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EP operation, GPRReg result, void* pointer)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(pointer));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(Z_JITOperation_D operation, GPRReg result, FPRReg arg1)
    {
        m_jit.setupArguments(arg1);
        JITCompiler::Call call = m_jit.appendCall(operation);
        m_jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, result);
        return call;
    }
    JITCompiler::Call callOperation(Q_JITOperation_J operation, GPRReg result, GPRReg value)
    {
        m_jit.setupArguments(value);
        return appendCallSetResult(operation, result);
    }
    JITCompiler::Call callOperation(Q_JITOperation_D operation, GPRReg result, FPRReg value)
    {
        m_jit.setupArguments(value);
        return appendCallSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EI operation, GPRReg result, StringImpl* uid)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(uid));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EA operation, GPRReg result, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EAZ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJssZ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EPS operation, GPRReg result, void* pointer, size_t size)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(pointer), TrustedImmPtr(size));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_ESS operation, GPRReg result, int startConstant, int numConstants)
    {
        m_jit.setupArgumentsWithExecState(TrustedImm32(startConstant), TrustedImm32(numConstants));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EPP operation, GPRReg result, GPRReg arg1, void* pointer)
    {
        m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(pointer));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EC operation, GPRReg result, JSCell* cell)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(cell));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_ESsiCI operation, GPRReg result, StructureStubInfo* stubInfo, GPRReg arg1, const StringImpl* uid)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1, TrustedImmPtr(uid));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_ESsiJI operation, GPRReg result, StructureStubInfo* stubInfo, GPRReg arg1, StringImpl* uid)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1, TrustedImmPtr(uid));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EDA operation, GPRReg result, FPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJA operation, GPRReg result, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EP operation, GPRReg result, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EZ operation, GPRReg result, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EZ operation, GPRReg result, int32_t arg1)
    {
        m_jit.setupArgumentsWithExecState(TrustedImm32(arg1));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EZZ operation, GPRReg result, int32_t arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(TrustedImm32(arg1), arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EZIcfZ operation, GPRReg result, int32_t arg1, InlineCallFrame* inlineCallFrame, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(TrustedImm32(arg1), TrustedImmPtr(inlineCallFrame), arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }

    JITCompiler::Call callOperation(P_JITOperation_EJS operation, GPRReg result, GPRReg value, size_t index)
    {
        m_jit.setupArgumentsWithExecState(value, TrustedImmPtr(index));
        return appendCallSetResult(operation, result);
    }

    JITCompiler::Call callOperation(P_JITOperation_EStJ operation, GPRReg result, Structure* structure, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure), arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }

    JITCompiler::Call callOperation(C_JITOperation_EJ operation, GPRReg result, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(S_JITOperation_J operation, GPRReg result, GPRReg arg1)
    {
        m_jit.setupArguments(arg1);
        return appendCallSetResult(operation, result);
    }
    JITCompiler::Call callOperation(S_JITOperation_EJ operation, GPRReg result, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJ operation, GPRReg result, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(S_JITOperation_EJJ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }

    JITCompiler::Call callOperation(J_JITOperation_EPP operation, GPRReg result, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJJ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJJ operation, GPRReg result, GPRReg arg1, MacroAssembler::TrustedImm32 imm)
    {
        m_jit.setupArgumentsWithExecState(arg1, MacroAssembler::TrustedImm64(JSValue::encode(jsNumber(imm.m_value))));
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJJ operation, GPRReg result, MacroAssembler::TrustedImm32 imm, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(MacroAssembler::TrustedImm64(JSValue::encode(jsNumber(imm.m_value))), arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_ECC operation, GPRReg result, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_ECJ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_ECJ operation, GPRReg result, GPRReg arg1, JSValueRegs arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2.gpr());
        return appendCallWithExceptionCheckSetResult(operation, result);
    }

    JITCompiler::Call callOperation(V_JITOperation_EOZD operation, GPRReg arg1, GPRReg arg2, FPRReg arg3)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2, arg3);
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_EJ operation, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_EJPP operation, GPRReg arg1, GPRReg arg2, void* pointer)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2, TrustedImmPtr(pointer));
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, GPRReg arg1, GPRReg arg2, StringImpl* uid)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1, arg2, TrustedImmPtr(uid));
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_EJJJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2, arg3);
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_EPZJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2, arg3);
        return appendCallWithExceptionCheck(operation);
    }

    JITCompiler::Call callOperation(V_JITOperation_EOZJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2, arg3);
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_ECJJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2, arg3);
        return appendCallWithExceptionCheck(operation);
    }

    JITCompiler::Call callOperation(V_JITOperation_EVwsJ operation, VariableWatchpointSet* watchpointSet, GPRReg arg)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(watchpointSet), arg);
        return appendCall(operation);
    }

    JITCompiler::Call callOperation(D_JITOperation_EJ operation, FPRReg result, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
#else // USE(JSVALUE32_64)

    // EncodedJSValue in JSVALUE32_64 is a 64-bit integer. When being compiled in ARM EABI, it must be aligned to an even-numbered register (r0, r2 or [sp]).
    // To avoid the assembler using the wrong registers, let's occupy r1 or r3 with a dummy argument when necessary.
#if (COMPILER_SUPPORTS(EABI) && CPU(ARM)) || CPU(MIPS)
#define EABI_32BIT_DUMMY_ARG      TrustedImm32(0),
#else
#define EABI_32BIT_DUMMY_ARG
#endif

    // JSVALUE32_64 is a 64-bit integer that cannot be put half in an argument register and half on stack when using SH4 architecture.
    // To avoid this, let's occupy the 4th argument register (r7) with a dummy argument when necessary. This must only be done when there
    // is no other 32-bit value argument behind this 64-bit JSValue.
#if CPU(SH4)
#define SH4_32BIT_DUMMY_ARG      TrustedImm32(0),
#else
#define SH4_32BIT_DUMMY_ARG
#endif
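
    // How these macros are used (illustrative): they are placed immediately before a
    // split tag/payload EncodedJSValue argument in setupArgumentsWithExecState(), e.g.
    //     m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, ...);
    // On targets that need the padding they expand to an extra TrustedImm32(0); on all
    // other targets they expand to nothing.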
    JITCompiler::Call callOperation(Z_JITOperation_D operation, GPRReg result, FPRReg arg1)
    {
        prepareForExternalCall();
        m_jit.setupArguments(arg1);
        JITCompiler::Call call = m_jit.appendCall(operation);
        m_jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, result);
        return call;
    }

    JITCompiler::Call callOperation(J_JITOperation_E operation, GPRReg resultTag, GPRReg resultPayload)
    {
        m_jit.setupArgumentsExecState();
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EP operation, GPRReg resultTag, GPRReg resultPayload, void* pointer)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(pointer));
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EPP operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, void* pointer)
    {
        m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(pointer));
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EP operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EI operation, GPRReg resultTag, GPRReg resultPayload, StringImpl* uid)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(uid));
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EA operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EAZ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJssZ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EPS operation, GPRReg resultTag, GPRReg resultPayload, void* pointer, size_t size)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(pointer), TrustedImmPtr(size));
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_ESS operation, GPRReg resultTag, GPRReg resultPayload, int startConstant, int numConstants)
    {
        m_jit.setupArgumentsWithExecState(TrustedImm32(startConstant), TrustedImm32(numConstants));
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJP operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, void* pointer)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(pointer));
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJP operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, arg2);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }

    JITCompiler::Call callOperation(J_JITOperation_EC operation, GPRReg resultTag, GPRReg resultPayload, JSCell* cell)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(cell));
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_ESsiCI operation, GPRReg resultTag, GPRReg resultPayload, StructureStubInfo* stubInfo, GPRReg arg1, const StringImpl* uid)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1, TrustedImmPtr(uid));
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_ESsiJI operation, GPRReg resultTag, GPRReg resultPayload, StructureStubInfo* stubInfo, GPRReg arg1Tag, GPRReg arg1Payload, StringImpl* uid)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1Payload, arg1Tag, TrustedImmPtr(uid));
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_ESsiJI operation, GPRReg resultTag, GPRReg resultPayload, StructureStubInfo* stubInfo, int32_t arg1Tag, GPRReg arg1Payload, StringImpl* uid)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1Payload, TrustedImm32(arg1Tag), TrustedImmPtr(uid));
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EDA operation, GPRReg resultTag, GPRReg resultPayload, FPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJA operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, arg2);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJA operation, GPRReg resultTag, GPRReg resultPayload, TrustedImm32 arg1Tag, GPRReg arg1Payload, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, arg2);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EZ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1)
    {
        m_jit.setupArgumentsWithExecState(arg1);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EZ operation, GPRReg resultTag, GPRReg resultPayload, int32_t arg1)
    {
        m_jit.setupArgumentsWithExecState(TrustedImm32(arg1));
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EZIcfZ operation, GPRReg resultTag, GPRReg resultPayload, int32_t arg1, InlineCallFrame* inlineCallFrame, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(TrustedImm32(arg1), TrustedImmPtr(inlineCallFrame), arg2);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EZZ operation, GPRReg resultTag, GPRReg resultPayload, int32_t arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(TrustedImm32(arg1), arg2);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(P_JITOperation_EJS operation, GPRReg result, JSValueRegs value, size_t index)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG value.payloadGPR(), value.tagGPR(), TrustedImmPtr(index));
        return appendCallSetResult(operation, result);
    }

    JITCompiler::Call callOperation(P_JITOperation_EStJ operation, GPRReg result, Structure* structure, GPRReg arg2Tag, GPRReg arg2Payload)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure), arg2Payload, arg2Tag);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }

    JITCompiler::Call callOperation(C_JITOperation_EJ operation, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(S_JITOperation_J operation, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload)
    {
        m_jit.setupArguments(arg1Payload, arg1Tag);
        return appendCallSetResult(operation, result);
    }
    JITCompiler::Call callOperation(S_JITOperation_EJ operation, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }

    JITCompiler::Call callOperation(S_JITOperation_EJJ operation, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJJ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJJ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, MacroAssembler::TrustedImm32 imm)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG imm, TrustedImm32(JSValue::Int32Tag));
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_EJJ operation, GPRReg resultTag, GPRReg resultPayload, MacroAssembler::TrustedImm32 imm, GPRReg arg2Tag, GPRReg arg2Payload)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG imm, TrustedImm32(JSValue::Int32Tag), SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }

    JITCompiler::Call callOperation(J_JITOperation_ECJ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, GPRReg arg2Tag, GPRReg arg2Payload)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2Payload, arg2Tag);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(J_JITOperation_ECJ operation, JSValueRegs result, GPRReg arg1, JSValueRegs arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2.payloadGPR(), arg2.tagGPR());
        return appendCallWithExceptionCheckSetResult(operation, result.payloadGPR(), result.tagGPR());
    }
    JITCompiler::Call callOperation(J_JITOperation_ECC operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, GPRReg arg2)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2);
        return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
    }
    JITCompiler::Call callOperation(V_JITOperation_EOZD operation, GPRReg arg1, GPRReg arg2, FPRReg arg3)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2, EABI_32BIT_DUMMY_ARG arg3);
        return appendCallWithExceptionCheck(operation);
    }

    JITCompiler::Call callOperation(V_JITOperation_EJ operation, GPRReg arg1Tag, GPRReg arg1Payload)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
        return appendCallWithExceptionCheck(operation);
    }

    JITCompiler::Call callOperation(V_JITOperation_EJPP operation, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2, void* pointer)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, arg2, TrustedImmPtr(pointer));
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Payload, StringImpl* uid)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1Payload, arg1Tag, arg2Payload, TrustedImm32(JSValue::CellTag), TrustedImmPtr(uid));
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_ECJJ operation, GPRReg arg1, GPRReg arg2Tag, GPRReg arg2Payload, GPRReg arg3Tag, GPRReg arg3Payload)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2Payload, arg2Tag, arg3Payload, arg3Tag);
        return appendCallWithExceptionCheck(operation);
    }

    JITCompiler::Call callOperation(V_JITOperation_EPZJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3Tag, GPRReg arg3Payload)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2, EABI_32BIT_DUMMY_ARG SH4_32BIT_DUMMY_ARG arg3Payload, arg3Tag);
        return appendCallWithExceptionCheck(operation);
    }

    JITCompiler::Call callOperation(V_JITOperation_EOZJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3Tag, GPRReg arg3Payload)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2, EABI_32BIT_DUMMY_ARG SH4_32BIT_DUMMY_ARG arg3Payload, arg3Tag);
        return appendCallWithExceptionCheck(operation);
    }
    JITCompiler::Call callOperation(V_JITOperation_EOZJ operation, GPRReg arg1, GPRReg arg2, TrustedImm32 arg3Tag, GPRReg arg3Payload)
    {
        m_jit.setupArgumentsWithExecState(arg1, arg2, EABI_32BIT_DUMMY_ARG SH4_32BIT_DUMMY_ARG arg3Payload, arg3Tag);
        return appendCallWithExceptionCheck(operation);
    }

    JITCompiler::Call callOperation(V_JITOperation_EVwsJ operation, VariableWatchpointSet* watchpointSet, GPRReg argTag, GPRReg argPayload)
    {
        m_jit.setupArgumentsWithExecState(TrustedImmPtr(watchpointSet), argPayload, argTag);
        return appendCall(operation);
    }

    JITCompiler::Call callOperation(D_JITOperation_EJ operation, FPRReg result, GPRReg arg1Tag, GPRReg arg1Payload)
    {
        m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
        return appendCallWithExceptionCheckSetResult(operation, result);
    }
#undef EABI_32BIT_DUMMY_ARG
#undef SH4_32BIT_DUMMY_ARG

    template<typename FunctionType>
    JITCompiler::Call callOperation(
        FunctionType operation, JSValueRegs result)
    {
        return callOperation(operation, result.tagGPR(), result.payloadGPR());
    }
    template<typename FunctionType, typename ArgumentType1>
    JITCompiler::Call callOperation(
        FunctionType operation, JSValueRegs result, ArgumentType1 arg1)
    {
        return callOperation(operation, result.tagGPR(), result.payloadGPR(), arg1);
    }
    template<typename FunctionType, typename ArgumentType1, typename ArgumentType2>
    JITCompiler::Call callOperation(
        FunctionType operation, JSValueRegs result, ArgumentType1 arg1, ArgumentType2 arg2)
    {
        return callOperation(operation, result.tagGPR(), result.payloadGPR(), arg1, arg2);
    }
    template<
        typename FunctionType, typename ArgumentType1, typename ArgumentType2,
        typename ArgumentType3>
    JITCompiler::Call callOperation(
        FunctionType operation, JSValueRegs result, ArgumentType1 arg1, ArgumentType2 arg2,
        ArgumentType3 arg3)
    {
        return callOperation(operation, result.tagGPR(), result.payloadGPR(), arg1, arg2, arg3);
    }
    template<
        typename FunctionType, typename ArgumentType1, typename ArgumentType2,
        typename ArgumentType3, typename ArgumentType4>
    JITCompiler::Call callOperation(
        FunctionType operation, JSValueRegs result, ArgumentType1 arg1, ArgumentType2 arg2,
        ArgumentType3 arg3, ArgumentType4 arg4)
    {
        return callOperation(operation, result.tagGPR(), result.payloadGPR(), arg1, arg2, arg3, arg4);
    }
    template<
        typename FunctionType, typename ArgumentType1, typename ArgumentType2,
        typename ArgumentType3, typename ArgumentType4, typename ArgumentType5>
    JITCompiler::Call callOperation(
        FunctionType operation, JSValueRegs result, ArgumentType1 arg1, ArgumentType2 arg2,
        ArgumentType3 arg3, ArgumentType4 arg4, ArgumentType5 arg5)
    {
        return callOperation(
            operation, result.tagGPR(), result.payloadGPR(), arg1, arg2, arg3, arg4, arg5);
    }
#endif // USE(JSVALUE32_64)
#if !defined(NDEBUG) && !CPU(ARM) && !CPU(MIPS) && !CPU(SH4)
    void prepareForExternalCall()
    {
        // We're about to call out to a "native" helper function. The helper
        // function is expected to set topCallFrame itself with the ExecState
        // that is passed to it.
        //
        // We explicitly trash topCallFrame here so that we'll know if some of
        // the helper functions are not setting topCallFrame when they should
        // be doing so. Note: the previous value in topCallFrame was not valid
        // anyway since it was not being updated by JIT'ed code by design.

        for (unsigned i = 0; i < sizeof(void*) / 4; i++)
            m_jit.store32(TrustedImm32(0xbadbeef), reinterpret_cast<char*>(&m_jit.vm()->topCallFrame) + i * 4);
    }
#else
    void prepareForExternalCall() { }
#endif
    // These methods add call instructions, with optional exception checks & setting results.
    JITCompiler::Call appendCallWithExceptionCheck(const FunctionPtr& function)
    {
        prepareForExternalCall();
        m_jit.emitStoreCodeOrigin(m_currentNode->origin.semantic);
        JITCompiler::Call call = m_jit.appendCall(function);
        m_jit.exceptionCheck();
        return call;
    }
    JITCompiler::Call appendCallWithCallFrameRollbackOnException(const FunctionPtr& function)
    {
        prepareForExternalCall();
        m_jit.emitStoreCodeOrigin(m_currentNode->origin.semantic);
        JITCompiler::Call call = m_jit.appendCall(function);
        m_jit.exceptionCheckWithCallFrameRollback();
        return call;
    }
    JITCompiler::Call appendCallWithExceptionCheckSetResult(const FunctionPtr& function, GPRReg result)
    {
        JITCompiler::Call call = appendCallWithExceptionCheck(function);
        if ((result != InvalidGPRReg) && (result != GPRInfo::returnValueGPR))
            m_jit.move(GPRInfo::returnValueGPR, result);
        return call;
    }
    JITCompiler::Call appendCallWithCallFrameRollbackOnExceptionSetResult(const FunctionPtr& function, GPRReg result)
    {
        JITCompiler::Call call = appendCallWithCallFrameRollbackOnException(function);
        if ((result != InvalidGPRReg) && (result != GPRInfo::returnValueGPR))
            m_jit.move(GPRInfo::returnValueGPR, result);
        return call;
    }
    JITCompiler::Call appendCallSetResult(const FunctionPtr& function, GPRReg result)
    {
        prepareForExternalCall();
        m_jit.emitStoreCodeOrigin(m_currentNode->origin.semantic);
        JITCompiler::Call call = m_jit.appendCall(function);
        if (result != InvalidGPRReg)
            m_jit.move(GPRInfo::returnValueGPR, result);
        return call;
    }
    JITCompiler::Call appendCall(const FunctionPtr& function)
    {
        prepareForExternalCall();
        m_jit.emitStoreCodeOrigin(m_currentNode->origin.semantic);
        return m_jit.appendCall(function);
    }
    JITCompiler::Call appendCallWithExceptionCheckSetResult(const FunctionPtr& function, GPRReg result1, GPRReg result2)
    {
        JITCompiler::Call call = appendCallWithExceptionCheck(function);
        m_jit.setupResults(result1, result2);
        return call;
    }

#if CPU(X86)
    JITCompiler::Call appendCallWithExceptionCheckSetResult(const FunctionPtr& function, FPRReg result)
    {
        JITCompiler::Call call = appendCallWithExceptionCheck(function);
        if (result != InvalidFPRReg) {
            m_jit.assembler().fstpl(0, JITCompiler::stackPointerRegister);
            m_jit.loadDouble(JITCompiler::stackPointerRegister, result);
        }
        return call;
    }
    JITCompiler::Call appendCallSetResult(const FunctionPtr& function, FPRReg result)
    {
        JITCompiler::Call call = appendCall(function);
        if (result != InvalidFPRReg) {
            m_jit.assembler().fstpl(0, JITCompiler::stackPointerRegister);
            m_jit.loadDouble(JITCompiler::stackPointerRegister, result);
        }
        return call;
    }
#elif CPU(ARM) && !CPU(ARM_HARDFP)
    JITCompiler::Call appendCallWithExceptionCheckSetResult(const FunctionPtr& function, FPRReg result)
    {
        JITCompiler::Call call = appendCallWithExceptionCheck(function);
        if (result != InvalidFPRReg)
            m_jit.assembler().vmov(result, GPRInfo::returnValueGPR, GPRInfo::returnValueGPR2);
        return call;
    }
    JITCompiler::Call appendCallSetResult(const FunctionPtr& function, FPRReg result)
    {
        JITCompiler::Call call = appendCall(function);
        if (result != InvalidFPRReg)
            m_jit.assembler().vmov(result, GPRInfo::returnValueGPR, GPRInfo::returnValueGPR2);
        return call;
    }
#else // CPU(X86_64) || (CPU(ARM) && CPU(ARM_HARDFP)) || CPU(ARM64) || CPU(MIPS) || CPU(SH4)
    JITCompiler::Call appendCallWithExceptionCheckSetResult(const FunctionPtr& function, FPRReg result)
    {
        JITCompiler::Call call = appendCallWithExceptionCheck(function);
        if (result != InvalidFPRReg)
            m_jit.moveDouble(FPRInfo::returnValueFPR, result);
        return call;
    }
    JITCompiler::Call appendCallSetResult(const FunctionPtr& function, FPRReg result)
    {
        JITCompiler::Call call = appendCall(function);
        if (result != InvalidFPRReg)
            m_jit.moveDouble(FPRInfo::returnValueFPR, result);
        return call;
    }
#endif
    void branchDouble(JITCompiler::DoubleCondition cond, FPRReg left, FPRReg right, BasicBlock* destination)
    {
        return addBranch(m_jit.branchDouble(cond, left, right), destination);
    }

    void branchDoubleNonZero(FPRReg value, FPRReg scratch, BasicBlock* destination)
    {
        return addBranch(m_jit.branchDoubleNonZero(value, scratch), destination);
    }

    template<typename T, typename U>
    void branch32(JITCompiler::RelationalCondition cond, T left, U right, BasicBlock* destination)
    {
        return addBranch(m_jit.branch32(cond, left, right), destination);
    }

    template<typename T, typename U>
    void branchTest32(JITCompiler::ResultCondition cond, T value, U mask, BasicBlock* destination)
    {
        return addBranch(m_jit.branchTest32(cond, value, mask), destination);
    }

    template<typename T>
    void branchTest32(JITCompiler::ResultCondition cond, T value, BasicBlock* destination)
    {
        return addBranch(m_jit.branchTest32(cond, value), destination);
    }

#if USE(JSVALUE64)
    template<typename T, typename U>
    void branch64(JITCompiler::RelationalCondition cond, T left, U right, BasicBlock* destination)
    {
        return addBranch(m_jit.branch64(cond, left, right), destination);
    }
#endif

    template<typename T, typename U>
    void branch8(JITCompiler::RelationalCondition cond, T left, U right, BasicBlock* destination)
    {
        return addBranch(m_jit.branch8(cond, left, right), destination);
    }

    template<typename T, typename U>
    void branchPtr(JITCompiler::RelationalCondition cond, T left, U right, BasicBlock* destination)
    {
        return addBranch(m_jit.branchPtr(cond, left, right), destination);
    }

    template<typename T, typename U>
    void branchTestPtr(JITCompiler::ResultCondition cond, T value, U mask, BasicBlock* destination)
    {
        return addBranch(m_jit.branchTestPtr(cond, value, mask), destination);
    }

    template<typename T>
    void branchTestPtr(JITCompiler::ResultCondition cond, T value, BasicBlock* destination)
    {
        return addBranch(m_jit.branchTestPtr(cond, value), destination);
    }

    template<typename T, typename U>
    void branchTest8(JITCompiler::ResultCondition cond, T value, U mask, BasicBlock* destination)
    {
        return addBranch(m_jit.branchTest8(cond, value, mask), destination);
    }

    template<typename T>
    void branchTest8(JITCompiler::ResultCondition cond, T value, BasicBlock* destination)
    {
        return addBranch(m_jit.branchTest8(cond, value), destination);
    }

    enum FallThroughMode {
        AtFallThroughPoint,
        ForceJump
    };
    void jump(BasicBlock* destination, FallThroughMode fallThroughMode = AtFallThroughPoint)
    {
        if (destination == nextBlock()
            && fallThroughMode == AtFallThroughPoint)
            return;
        addBranch(m_jit.jump(), destination);
    }

    void addBranch(const MacroAssembler::Jump& jump, BasicBlock* destination)
    {
        m_branches.append(BranchRecord(jump, destination));
    }
    void addBranch(const MacroAssembler::JumpList& jump, BasicBlock* destination);

    void linkBranches();

    void dump(const char* label = 0);
    bool isInteger(Node* node)
    {
        if (node->hasInt32Result())
            return true;

        if (isInt32Constant(node))
            return true;

        return generationInfo(node).isJSInt32();
    }

    bool betterUseStrictInt52(Node* node)
    {
        return !generationInfo(node).isInt52();
    }
    bool betterUseStrictInt52(Edge edge)
    {
        return betterUseStrictInt52(edge.node());
    }

    bool compare(Node*, MacroAssembler::RelationalCondition, MacroAssembler::DoubleCondition, S_JITOperation_EJJ);
    bool compilePeepHoleBranch(Node*, MacroAssembler::RelationalCondition, MacroAssembler::DoubleCondition, S_JITOperation_EJJ);
    void compilePeepHoleInt32Branch(Node*, Node* branchNode, JITCompiler::RelationalCondition);
    void compilePeepHoleInt52Branch(Node*, Node* branchNode, JITCompiler::RelationalCondition);
    void compilePeepHoleBooleanBranch(Node*, Node* branchNode, JITCompiler::RelationalCondition);
    void compilePeepHoleDoubleBranch(Node*, Node* branchNode, JITCompiler::DoubleCondition);
    void compilePeepHoleObjectEquality(Node*, Node* branchNode);
    void compilePeepHoleObjectToObjectOrOtherEquality(Edge leftChild, Edge rightChild, Node* branchNode);
    void compileObjectEquality(Node*);
    void compileObjectToObjectOrOtherEquality(Edge leftChild, Edge rightChild);
    void compileObjectOrOtherLogicalNot(Edge value);
    void compileLogicalNot(Node*);
    void compileStringEquality(
        Node*, GPRReg leftGPR, GPRReg rightGPR, GPRReg lengthGPR,
        GPRReg leftTempGPR, GPRReg rightTempGPR, GPRReg leftTemp2GPR,
        GPRReg rightTemp2GPR, JITCompiler::JumpList fastTrue,
        JITCompiler::JumpList fastSlow);
    void compileStringEquality(Node*);
    void compileStringIdentEquality(Node*);
    void compileStringToUntypedEquality(Node*, Edge stringEdge, Edge untypedEdge);
    void compileStringIdentToNotStringVarEquality(Node*, Edge stringEdge, Edge notStringVarEdge);
    void compileStringZeroLength(Node*);
    void compileMiscStrictEq(Node*);

    void emitObjectOrOtherBranch(Edge value, BasicBlock* taken, BasicBlock* notTaken);
    void emitBranch(Node*);

    struct StringSwitchCase {
        StringSwitchCase() { }

        StringSwitchCase(StringImpl* string, BasicBlock* target)
            : string(string)
            , target(target)
        {
        }

        bool operator<(const StringSwitchCase& other) const;

        StringImpl* string;
        BasicBlock* target;
    };

    void emitSwitchIntJump(SwitchData*, GPRReg value, GPRReg scratch);
    void emitSwitchImm(Node*, SwitchData*);
    void emitSwitchCharStringJump(SwitchData*, GPRReg value, GPRReg scratch);
    void emitSwitchChar(Node*, SwitchData*);
    void emitBinarySwitchStringRecurse(
        SwitchData*, const Vector<StringSwitchCase>&, unsigned numChecked,
        unsigned begin, unsigned end, GPRReg buffer, GPRReg length, GPRReg temp,
        unsigned alreadyCheckedLength, bool checkedExactLength);
    void emitSwitchStringOnString(SwitchData*, GPRReg string);
    void emitSwitchString(Node*, SwitchData*);
    void emitSwitch(Node*);

    void compileToStringOnCell(Node*);
    void compileNewStringObject(Node*);

    void compileNewTypedArray(Node*);

    void compileInt32Compare(Node*, MacroAssembler::RelationalCondition);
    void compileInt52Compare(Node*, MacroAssembler::RelationalCondition);
    void compileBooleanCompare(Node*, MacroAssembler::RelationalCondition);
    void compileDoubleCompare(Node*, MacroAssembler::DoubleCondition);

    bool compileStrictEq(Node*);

    void compileAllocatePropertyStorage(Node*);
    void compileReallocatePropertyStorage(Node*);
#if USE(JSVALUE32_64)
    template<typename BaseOperandType, typename PropertyOperandType, typename ValueOperandType, typename TagType>
    void compileContiguousPutByVal(Node*, BaseOperandType&, PropertyOperandType&, ValueOperandType&, GPRReg valuePayloadReg, TagType valueTag);
#endif
    void compileDoublePutByVal(Node*, SpeculateCellOperand& base, SpeculateStrictInt32Operand& property);
    bool putByValWillNeedExtraRegister(ArrayMode arrayMode)
    {
        return arrayMode.mayStoreToHole();
    }
    GPRReg temporaryRegisterForPutByVal(GPRTemporary&, ArrayMode);
    GPRReg temporaryRegisterForPutByVal(GPRTemporary& temporary, Node* node)
    {
        return temporaryRegisterForPutByVal(temporary, node->arrayMode());
    }

    void compileGetCharCodeAt(Node*);
    void compileGetByValOnString(Node*);
    void compileFromCharCode(Node*);

    void compileGetByValOnArguments(Node*);
    void compileGetArgumentsLength(Node*);

    void compileGetArrayLength(Node*);

    void compileValueRep(Node*);
    void compileDoubleRep(Node*);

    void compileValueToInt32(Node*);
    void compileUInt32ToNumber(Node*);
    void compileDoubleAsInt32(Node*);
    void compileAdd(Node*);
    void compileMakeRope(Node*);
    void compileArithSub(Node*);
    void compileArithNegate(Node*);
    void compileArithMul(Node*);
    void compileArithDiv(Node*);
    void compileArithMod(Node*);
    void compileConstantStoragePointer(Node*);
    void compileGetIndexedPropertyStorage(Node*);
    JITCompiler::Jump jumpForTypedArrayOutOfBounds(Node*, GPRReg baseGPR, GPRReg indexGPR);
    void emitTypedArrayBoundsCheck(Node*, GPRReg baseGPR, GPRReg indexGPR);
    void compileGetTypedArrayByteOffset(Node*);
    void compileGetByValOnIntTypedArray(Node*, TypedArrayType);
    void compilePutByValForIntTypedArray(GPRReg base, GPRReg property, Node*, TypedArrayType);
    void compileGetByValOnFloatTypedArray(Node*, TypedArrayType);
    void compilePutByValForFloatTypedArray(GPRReg base, GPRReg property, Node*, TypedArrayType);
    void compileNewFunctionNoCheck(Node*);
    void compileNewFunctionExpression(Node*);
    bool compileRegExpExec(Node*);

    JITCompiler::Jump branchIsCell(JSValueRegs);
    JITCompiler::Jump branchNotCell(JSValueRegs);
    JITCompiler::Jump branchIsOther(JSValueRegs, GPRReg tempGPR);
    JITCompiler::Jump branchNotOther(JSValueRegs, GPRReg tempGPR);

    void moveTrueTo(GPRReg);
    void moveFalseTo(GPRReg);
    void blessBoolean(GPRReg);
    // size can be an immediate or a register, and must be in bytes. If size is a register,
    // it must be a different register than resultGPR. Emits code that places a pointer to
    // the end of the allocation in resultGPR. The returned jump is the jump to the slow path.
    template<typename SizeType>
    MacroAssembler::Jump emitAllocateBasicStorage(SizeType size, GPRReg resultGPR)
    {
        CopiedAllocator* copiedAllocator = &m_jit.vm()->heap.storageAllocator();

        // It's invalid to allocate zero bytes in CopiedSpace.
#ifndef NDEBUG
        m_jit.move(size, resultGPR);
        MacroAssembler::Jump nonZeroSize = m_jit.branchTest32(MacroAssembler::NonZero, resultGPR);
        m_jit.abortWithReason(DFGBasicStorageAllocatorZeroSize);
        nonZeroSize.link(&m_jit);
#endif

        m_jit.loadPtr(&copiedAllocator->m_currentRemaining, resultGPR);
        MacroAssembler::Jump slowPath = m_jit.branchSubPtr(JITCompiler::Signed, size, resultGPR);
        m_jit.storePtr(resultGPR, &copiedAllocator->m_currentRemaining);
        m_jit.negPtr(resultGPR);
        m_jit.addPtr(JITCompiler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), resultGPR);

        return slowPath;
    }
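
    // Illustrative (hypothetical) caller of emitAllocateBasicStorage: grab a temporary,
    // bump-allocate a fixed number of bytes out of CopiedSpace, and route the failure case to a
    // slow-path jump list. The size constant is made up for the example.
    //
    //     GPRTemporary storage(this);
    //     GPRReg storageGPR = storage.gpr();
    //     MacroAssembler::JumpList slowCases;
    //     slowCases.append(emitAllocateBasicStorage(TrustedImm32(64), storageGPR));
    //     // storageGPR now points at the end of the 64-byte allocation; callers typically
    //     // address the payload with negative offsets from it.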
    // Allocator for a cell of a specific size.
    template <typename StructureType> // StructureType can be GPR or ImmPtr.
    void emitAllocateJSCell(GPRReg resultGPR, GPRReg allocatorGPR, StructureType structure,
        GPRReg scratchGPR, MacroAssembler::JumpList& slowPath)
    {
        m_jit.loadPtr(MacroAssembler::Address(allocatorGPR, MarkedAllocator::offsetOfFreeListHead()), resultGPR);
        slowPath.append(m_jit.branchTestPtr(MacroAssembler::Zero, resultGPR));

        // The object is half-allocated: we have what we know is a fresh object, but
        // it's still on the GC's free list.
        m_jit.loadPtr(MacroAssembler::Address(resultGPR), scratchGPR);
        m_jit.storePtr(scratchGPR, MacroAssembler::Address(allocatorGPR, MarkedAllocator::offsetOfFreeListHead()));

        // Initialize the object's Structure.
        m_jit.emitStoreStructureWithTypeInfo(structure, resultGPR, scratchGPR);
    }

    // Allocator for an object of a specific size.
    template <typename StructureType, typename StorageType> // StructureType and StorageType can be GPR or ImmPtr.
    void emitAllocateJSObject(GPRReg resultGPR, GPRReg allocatorGPR, StructureType structure,
        StorageType storage, GPRReg scratchGPR, MacroAssembler::JumpList& slowPath)
    {
        emitAllocateJSCell(resultGPR, allocatorGPR, structure, scratchGPR, slowPath);

        // Initialize the object's property storage pointer.
        m_jit.storePtr(storage, MacroAssembler::Address(resultGPR, JSObject::butterflyOffset()));
    }

    // Convenience allocator for a built-in object.
    template <typename ClassType, typename StructureType, typename StorageType> // StructureType and StorageType can be GPR or ImmPtr.
    void emitAllocateJSObject(GPRReg resultGPR, StructureType structure, StorageType storage,
        GPRReg scratchGPR1, GPRReg scratchGPR2, MacroAssembler::JumpList& slowPath)
    {
        MarkedAllocator* allocator = 0;
        size_t size = ClassType::allocationSize(0);
        if (ClassType::needsDestruction && ClassType::hasImmortalStructure)
            allocator = &m_jit.vm()->heap.allocatorForObjectWithImmortalStructureDestructor(size);
        else if (ClassType::needsDestruction)
            allocator = &m_jit.vm()->heap.allocatorForObjectWithNormalDestructor(size);
        else
            allocator = &m_jit.vm()->heap.allocatorForObjectWithoutDestructor(size);
        m_jit.move(TrustedImmPtr(allocator), scratchGPR1);
        emitAllocateJSObject(resultGPR, scratchGPR1, structure, storage, scratchGPR2, slowPath);
    }
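
    // Illustrative (hypothetical) use of the convenience allocator: inline-allocate a built-in
    // object with no out-of-line storage, deferring to a slow path on allocation failure.
    //
    //     GPRTemporary result(this);
    //     GPRTemporary scratch1(this);
    //     GPRTemporary scratch2(this);
    //     MacroAssembler::JumpList slowPath;
    //     emitAllocateJSObject<JSFinalObject>(
    //         result.gpr(), TrustedImmPtr(structure), TrustedImmPtr(0),
    //         scratch1.gpr(), scratch2.gpr(), slowPath);
    //     // slowPath is then handed to a SlowPathGenerator that calls out to the VM.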
    template <typename T>
    void emitAllocateDestructibleObject(GPRReg resultGPR, Structure* structure,
        GPRReg scratchGPR1, GPRReg scratchGPR2, MacroAssembler::JumpList& slowPath)
    {
        emitAllocateJSObject<T>(resultGPR, TrustedImmPtr(structure), TrustedImmPtr(0), scratchGPR1, scratchGPR2, slowPath);
        m_jit.storePtr(TrustedImmPtr(structure->classInfo()), MacroAssembler::Address(resultGPR, JSDestructibleObject::classInfoOffset()));
    }

    void emitAllocateJSArray(GPRReg resultGPR, Structure*, GPRReg storageGPR, unsigned numElements);
    void emitAllocateArguments(GPRReg resultGPR, GPRReg scratchGPR1, GPRReg scratchGPR2, MacroAssembler::JumpList& slowPath);
    // Add a speculation check.
    void speculationCheck(ExitKind, JSValueSource, Node*, MacroAssembler::Jump jumpToFail);
    void speculationCheck(ExitKind, JSValueSource, Node*, const MacroAssembler::JumpList& jumpsToFail);

    // Add a speculation check without additional recovery, and with a promise to supply a jump later.
    OSRExitJumpPlaceholder speculationCheck(ExitKind, JSValueSource, Node*);
    OSRExitJumpPlaceholder speculationCheck(ExitKind, JSValueSource, Edge);
    void speculationCheck(ExitKind, JSValueSource, Edge, MacroAssembler::Jump jumpToFail);
    void speculationCheck(ExitKind, JSValueSource, Edge, const MacroAssembler::JumpList& jumpsToFail);
    // Add a speculation check with additional recovery.
    void speculationCheck(ExitKind, JSValueSource, Node*, MacroAssembler::Jump jumpToFail, const SpeculationRecovery&);
    void speculationCheck(ExitKind, JSValueSource, Edge, MacroAssembler::Jump jumpToFail, const SpeculationRecovery&);

    void emitInvalidationPoint(Node*);

    // Called when we statically determine that a speculation will fail.
    void terminateSpeculativeExecution(ExitKind, JSValueRegs, Node*);
    void terminateSpeculativeExecution(ExitKind, JSValueRegs, Edge);

    // Helpers for performing type checks on an edge stored in the given registers.
    bool needsTypeCheck(Edge edge, SpeculatedType typesPassedThrough) { return m_interpreter.needsTypeCheck(edge, typesPassedThrough); }
    void typeCheck(JSValueSource, Edge, SpeculatedType typesPassedThrough, MacroAssembler::Jump jumpToFail);

    void speculateInt32(Edge);
#if USE(JSVALUE64)
    void convertMachineInt(Edge, GPRReg resultGPR);
    void speculateMachineInt(Edge);
    void speculateDoubleRepMachineInt(Edge);
#endif // USE(JSVALUE64)
    void speculateNumber(Edge);
    void speculateDoubleReal(Edge);
    void speculateBoolean(Edge);
    void speculateCell(Edge);
    void speculateObject(Edge);
    void speculateFinalObject(Edge);
    void speculateObjectOrOther(Edge);
    void speculateString(Edge edge, GPRReg cell);
    void speculateStringIdentAndLoadStorage(Edge edge, GPRReg string, GPRReg storage);
    void speculateStringIdent(Edge edge, GPRReg string);
    void speculateStringIdent(Edge);
    void speculateString(Edge);
    void speculateNotStringVar(Edge);
    template<typename StructureLocationType>
    void speculateStringObjectForStructure(Edge, StructureLocationType);
    void speculateStringObject(Edge, GPRReg);
    void speculateStringObject(Edge);
    void speculateStringOrStringObject(Edge);
    void speculateNotCell(Edge);
    void speculateOther(Edge);
    void speculateMisc(Edge, JSValueRegs);
    void speculateMisc(Edge);
    void speculate(Node*, Edge);

    JITCompiler::Jump jumpSlowForUnwantedArrayMode(GPRReg tempWithIndexingTypeReg, ArrayMode, IndexingType);
    JITCompiler::JumpList jumpSlowForUnwantedArrayMode(GPRReg tempWithIndexingTypeReg, ArrayMode);
    void checkArray(Node*);
    void arrayify(Node*, GPRReg baseReg, GPRReg propertyReg);
    void arrayify(Node*);

    template<bool strict>
    GPRReg fillSpeculateInt32Internal(Edge, DataFormat& returnFormat);

    // It is possible, during speculative generation, to reach a situation in which we
    // can statically determine a speculation will fail (for example, when two nodes
    // will make conflicting speculations about the same operand). In such cases this
    // flag is cleared, indicating no further code generation should take place.
    bool m_compileOkay;
    void recordSetLocal(
        VirtualRegister bytecodeReg, VirtualRegister machineReg, DataFormat format)
    {
        m_stream->appendAndLog(VariableEvent::setLocal(bytecodeReg, machineReg, format));
    }

    void recordSetLocal(DataFormat format)
    {
        VariableAccessData* variable = m_currentNode->variableAccessData();
        recordSetLocal(variable->local(), variable->machineLocal(), format);
    }

    GenerationInfo& generationInfoFromVirtualRegister(VirtualRegister virtualRegister)
    {
        return m_generationInfo[virtualRegister.toLocal()];
    }

    GenerationInfo& generationInfo(Node* node)
    {
        return generationInfoFromVirtualRegister(node->virtualRegister());
    }

    GenerationInfo& generationInfo(Edge edge)
    {
        return generationInfo(edge.node());
    }

    // The JIT, which also provides MacroAssembler functionality.
    JITCompiler& m_jit;

    // The current node being generated.
    BasicBlock* m_block;
    Node* m_currentNode;
    NodeType m_lastGeneratedNode;
    unsigned m_indexInBlock;
    // Virtual and physical register maps.
    Vector<GenerationInfo, 32> m_generationInfo;
    RegisterBank<GPRInfo> m_gprs;
    RegisterBank<FPRInfo> m_fprs;

    Vector<MacroAssembler::Label> m_osrEntryHeads;

    struct BranchRecord {
        BranchRecord(MacroAssembler::Jump jump, BasicBlock* destination)
            : jump(jump)
            , destination(destination)
        {
        }

        MacroAssembler::Jump jump;
        BasicBlock* destination;
    };
    Vector<BranchRecord, 8> m_branches;

    CodeOrigin m_codeOriginForExitTarget;
    CodeOrigin m_codeOriginForExitProfile;

    InPlaceAbstractState m_state;
    AbstractInterpreter<InPlaceAbstractState> m_interpreter;

    VariableEventStream* m_stream;
    MinifiedGraph* m_minifiedGraph;

    bool m_isCheckingArgumentTypes;

    Vector<OwnPtr<SlowPathGenerator>, 8> m_slowPathGenerators;
    Vector<SilentRegisterSavePlan> m_plans;
};
// === Operand types ===
//
// These classes are used to lock the operands to a node into machine
// registers. These classes implement a pattern of locking a value
// into a register at the point of construction only if it is already in
// registers, and otherwise loading it lazily at the point it is first
// used. We do so in order to attempt to avoid spilling one operand
// in order to make space available for another.

class JSValueOperand {
public:
    explicit JSValueOperand(SpeculativeJIT* jit, Edge edge, OperandSpeculationMode mode = AutomaticOperandSpeculation)
        : m_jit(jit)
        , m_edge(edge)
#if USE(JSVALUE64)
        , m_gprOrInvalid(InvalidGPRReg)
#elif USE(JSVALUE32_64)
        , m_isDouble(false)
#endif
    {
        ASSERT_UNUSED(mode, mode == ManualOperandSpeculation || edge.useKind() == UntypedUse);
#if USE(JSVALUE64)
        if (jit->isFilled(node()))
            gpr();
#elif USE(JSVALUE32_64)
        m_register.pair.tagGPR = InvalidGPRReg;
        m_register.pair.payloadGPR = InvalidGPRReg;
        if (jit->isFilled(node()))
            fill();
#endif
    }

    ~JSValueOperand()
    {
#if USE(JSVALUE64)
        ASSERT(m_gprOrInvalid != InvalidGPRReg);
        m_jit->unlock(m_gprOrInvalid);
#elif USE(JSVALUE32_64)
        if (m_isDouble) {
            ASSERT(m_register.fpr != InvalidFPRReg);
            m_jit->unlock(m_register.fpr);
        } else {
            ASSERT(m_register.pair.tagGPR != InvalidGPRReg && m_register.pair.payloadGPR != InvalidGPRReg);
            m_jit->unlock(m_register.pair.tagGPR);
            m_jit->unlock(m_register.pair.payloadGPR);
        }
#endif
    }

    Edge edge() const
    {
        return m_edge;
    }

    Node* node() const
    {
        return edge().node();
    }

#if USE(JSVALUE64)
    GPRReg gpr()
    {
        if (m_gprOrInvalid == InvalidGPRReg)
            m_gprOrInvalid = m_jit->fillJSValue(m_edge);
        return m_gprOrInvalid;
    }
    JSValueRegs jsValueRegs()
    {
        return JSValueRegs(gpr());
    }
#elif USE(JSVALUE32_64)
    bool isDouble() { return m_isDouble; }

    void fill()
    {
        if (m_register.pair.tagGPR == InvalidGPRReg && m_register.pair.payloadGPR == InvalidGPRReg)
            m_isDouble = !m_jit->fillJSValue(m_edge, m_register.pair.tagGPR, m_register.pair.payloadGPR, m_register.fpr);
    }

    GPRReg tagGPR()
    {
        ASSERT(!m_isDouble);
        return m_register.pair.tagGPR;
    }

    GPRReg payloadGPR()
    {
        ASSERT(!m_isDouble);
        return m_register.pair.payloadGPR;
    }

    JSValueRegs jsValueRegs()
    {
        return JSValueRegs(tagGPR(), payloadGPR());
    }

    GPRReg gpr(WhichValueWord which)
    {
        return jsValueRegs().gpr(which);
    }

    FPRReg fpr()
    {
        return m_register.fpr;
    }
#endif

private:
    SpeculativeJIT* m_jit;
    Edge m_edge;
#if USE(JSVALUE64)
    GPRReg m_gprOrInvalid;
#elif USE(JSVALUE32_64)
    union {
        struct {
            GPRReg tagGPR;
            GPRReg payloadGPR;
        } pair;
        FPRReg fpr;
    } m_register;
    bool m_isDouble;
#endif
};
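
// Illustrative (hypothetical) use of JSValueOperand inside a SpeculativeJIT compile method; the
// operand lazily locks the edge's current register(s) and unlocks them when it goes out of scope:
//
//     JSValueOperand value(this, node->child1());
//     JSValueRegs valueRegs = value.jsValueRegs();
//     // ... emit code that reads valueRegs ...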
class StorageOperand {
public:
    explicit StorageOperand(SpeculativeJIT* jit, Edge edge)
        : m_jit(jit)
        , m_edge(edge)
        , m_gprOrInvalid(InvalidGPRReg)
    {
        ASSERT(edge.useKind() == UntypedUse || edge.useKind() == KnownCellUse);
        if (jit->isFilled(node()))
            gpr();
    }

    ~StorageOperand()
    {
        ASSERT(m_gprOrInvalid != InvalidGPRReg);
        m_jit->unlock(m_gprOrInvalid);
    }

    Edge edge() const
    {
        return m_edge;
    }

    Node* node() const
    {
        return edge().node();
    }

    GPRReg gpr()
    {
        if (m_gprOrInvalid == InvalidGPRReg)
            m_gprOrInvalid = m_jit->fillStorage(edge());
        return m_gprOrInvalid;
    }

private:
    SpeculativeJIT* m_jit;
    Edge m_edge;
    GPRReg m_gprOrInvalid;
};
// === Temporaries ===
//
// These classes are used to allocate temporary registers.
// A mechanism is provided to attempt to reuse the registers
// currently allocated to child nodes whose value is consumed
// by, and not live after, this operation.

enum ReuseTag { Reuse };

class GPRTemporary {
public:
    GPRTemporary();
    GPRTemporary(SpeculativeJIT*);
    GPRTemporary(SpeculativeJIT*, GPRReg specific);
    template<typename T>
    GPRTemporary(SpeculativeJIT* jit, ReuseTag, T& operand)
        : m_jit(jit)
        , m_gpr(InvalidGPRReg)
    {
        if (m_jit->canReuse(operand.node()))
            m_gpr = m_jit->reuse(operand.gpr());
        else
            m_gpr = m_jit->allocate();
    }
    template<typename T1, typename T2>
    GPRTemporary(SpeculativeJIT* jit, ReuseTag, T1& op1, T2& op2)
        : m_jit(jit)
        , m_gpr(InvalidGPRReg)
    {
        if (m_jit->canReuse(op1.node()))
            m_gpr = m_jit->reuse(op1.gpr());
        else if (m_jit->canReuse(op2.node()))
            m_gpr = m_jit->reuse(op2.gpr());
        else
            m_gpr = m_jit->allocate();
    }
#if USE(JSVALUE32_64)
    GPRTemporary(SpeculativeJIT*, ReuseTag, JSValueOperand&, WhichValueWord);
#endif

    void adopt(GPRTemporary&);

    ~GPRTemporary()
    {
        if (m_jit && m_gpr != InvalidGPRReg)
            m_jit->unlock(gpr());
    }

    GPRReg gpr()
    {
        return m_gpr;
    }

private:
    SpeculativeJIT* m_jit;
    GPRReg m_gpr;
};
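
// Illustrative (hypothetical) use of GPRTemporary, including the register-reuse form that steals
// the register of an operand whose value dies at this node:
//
//     SpeculateInt32Operand op1(this, node->child1());
//     GPRTemporary result(this, Reuse, op1);
//     m_jit.move(op1.gpr(), result.gpr());
//     m_jit.add32(TrustedImm32(1), result.gpr());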
class JSValueRegsTemporary {
public:
    JSValueRegsTemporary();
    JSValueRegsTemporary(SpeculativeJIT*);
    ~JSValueRegsTemporary();

    JSValueRegs regs();

private:
#if USE(JSVALUE64)
    GPRTemporary m_gpr;
#else
    GPRTemporary m_payloadGPR;
    GPRTemporary m_tagGPR;
#endif
};

class FPRTemporary {
public:
    FPRTemporary(SpeculativeJIT*);
    FPRTemporary(SpeculativeJIT*, SpeculateDoubleOperand&);
    FPRTemporary(SpeculativeJIT*, SpeculateDoubleOperand&, SpeculateDoubleOperand&);
#if USE(JSVALUE32_64)
    FPRTemporary(SpeculativeJIT*, JSValueOperand&);
#endif

    ~FPRTemporary()
    {
        m_jit->unlock(fpr());
    }

    FPRReg fpr() const
    {
        ASSERT(m_fpr != InvalidFPRReg);
        return m_fpr;
    }

protected:
    FPRTemporary(SpeculativeJIT* jit, FPRReg lockedFPR)
        : m_jit(jit)
        , m_fpr(lockedFPR)
    {
    }

private:
    SpeculativeJIT* m_jit;
    FPRReg m_fpr;
};
// === Results ===
//
// These classes lock the result of a call to a C++ helper function.

class GPRResult : public GPRTemporary {
public:
    GPRResult(SpeculativeJIT* jit)
        : GPRTemporary(jit, GPRInfo::returnValueGPR)
    {
    }
};

#if USE(JSVALUE32_64)
class GPRResult2 : public GPRTemporary {
public:
    GPRResult2(SpeculativeJIT* jit)
        : GPRTemporary(jit, GPRInfo::returnValueGPR2)
    {
    }
};
#endif

class FPRResult : public FPRTemporary {
public:
    FPRResult(SpeculativeJIT* jit)
        : FPRTemporary(jit, lockedResult(jit))
    {
    }

private:
    static FPRReg lockedResult(SpeculativeJIT* jit)
    {
        jit->lock(FPRInfo::returnValueFPR);
        return FPRInfo::returnValueFPR;
    }
};
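
// Illustrative (hypothetical) pairing of a result class with callOperation on a 64-bit build: the
// GPRResult pins returnValueGPR for the duration of the call sequence. operationSomething stands
// in for a real J_JITOperation_E-style helper.
//
//     GPRResult result(this);
//     callOperation(operationSomething, result.gpr());
//     jsValueResult(result.gpr(), node);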
// === Speculative Operand types ===
//
// SpeculateInt32Operand, SpeculateStrictInt32Operand and SpeculateCellOperand.
//
// These are used to lock the operands to a node into machine registers within the
// SpeculativeJIT. The classes operate like those above, however these will
// perform a speculative check for a more restrictive type than we can statically
// determine the operand to have. If the operand does not have the requested type,
// a bail-out to the non-speculative path will be taken.

class SpeculateInt32Operand {
public:
    explicit SpeculateInt32Operand(SpeculativeJIT* jit, Edge edge, OperandSpeculationMode mode = AutomaticOperandSpeculation)
        : m_jit(jit)
        , m_edge(edge)
        , m_gprOrInvalid(InvalidGPRReg)
#ifndef NDEBUG
        , m_format(DataFormatNone)
#endif
    {
        ASSERT_UNUSED(mode, mode == ManualOperandSpeculation || (edge.useKind() == Int32Use || edge.useKind() == KnownInt32Use));
        if (jit->isFilled(node()))
            gpr();
    }

    ~SpeculateInt32Operand()
    {
        ASSERT(m_gprOrInvalid != InvalidGPRReg);
        m_jit->unlock(m_gprOrInvalid);
    }

    Edge edge() const
    {
        return m_edge;
    }

    Node* node() const
    {
        return edge().node();
    }

    DataFormat format()
    {
        gpr(); // m_format is set when m_gpr is locked.
        ASSERT(m_format == DataFormatInt32 || m_format == DataFormatJSInt32);
        return m_format;
    }

    GPRReg gpr()
    {
        if (m_gprOrInvalid == InvalidGPRReg)
            m_gprOrInvalid = m_jit->fillSpeculateInt32(edge(), m_format);
        return m_gprOrInvalid;
    }

private:
    SpeculativeJIT* m_jit;
    Edge m_edge;
    GPRReg m_gprOrInvalid;
    DataFormat m_format;
};
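
// Illustrative (hypothetical) use of SpeculateInt32Operand: the fill performs the Int32
// speculation, so by the time gpr() returns, any failed check has already been routed to an OSR
// exit.
//
//     SpeculateInt32Operand op1(this, node->child1());
//     GPRReg op1GPR = op1.gpr();
//     // op1GPR now holds a value speculated to be an int32; op1.format() reports whether it is
//     // boxed (DataFormatJSInt32) or unboxed (DataFormatInt32).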
class SpeculateStrictInt32Operand {
public:
    explicit SpeculateStrictInt32Operand(SpeculativeJIT* jit, Edge edge, OperandSpeculationMode mode = AutomaticOperandSpeculation)
        : m_jit(jit)
        , m_edge(edge)
        , m_gprOrInvalid(InvalidGPRReg)
    {
        ASSERT_UNUSED(mode, mode == ManualOperandSpeculation || (edge.useKind() == Int32Use || edge.useKind() == KnownInt32Use));
        if (jit->isFilled(node()))
            gpr();
    }

    ~SpeculateStrictInt32Operand()
    {
        ASSERT(m_gprOrInvalid != InvalidGPRReg);
        m_jit->unlock(m_gprOrInvalid);
    }

    Edge edge() const
    {
        return m_edge;
    }

    Node* node() const
    {
        return edge().node();
    }

    GPRReg gpr()
    {
        if (m_gprOrInvalid == InvalidGPRReg)
            m_gprOrInvalid = m_jit->fillSpeculateInt32Strict(edge());
        return m_gprOrInvalid;
    }

private:
    SpeculativeJIT* m_jit;
    Edge m_edge;
    GPRReg m_gprOrInvalid;
};
// Gives you a canonical Int52 (i.e. it's left-shifted by 16, low bits zero).
class SpeculateInt52Operand {
public:
    explicit SpeculateInt52Operand(SpeculativeJIT* jit, Edge edge)
        : m_jit(jit)
        , m_edge(edge)
        , m_gprOrInvalid(InvalidGPRReg)
    {
        RELEASE_ASSERT(edge.useKind() == Int52RepUse);
        if (jit->isFilled(node()))
            gpr();
    }

    ~SpeculateInt52Operand()
    {
        ASSERT(m_gprOrInvalid != InvalidGPRReg);
        m_jit->unlock(m_gprOrInvalid);
    }

    Edge edge() const
    {
        return m_edge;
    }

    Node* node() const
    {
        return edge().node();
    }

    GPRReg gpr()
    {
        if (m_gprOrInvalid == InvalidGPRReg)
            m_gprOrInvalid = m_jit->fillSpeculateInt52(edge(), DataFormatInt52);
        return m_gprOrInvalid;
    }

private:
    SpeculativeJIT* m_jit;
    Edge m_edge;
    GPRReg m_gprOrInvalid;
};

// Gives you a strict Int52 (i.e. the payload is in the low 48 bits, high 16 bits are sign-extended).
class SpeculateStrictInt52Operand {
public:
    explicit SpeculateStrictInt52Operand(SpeculativeJIT* jit, Edge edge)
        : m_jit(jit)
        , m_edge(edge)
        , m_gprOrInvalid(InvalidGPRReg)
    {
        RELEASE_ASSERT(edge.useKind() == Int52RepUse);
        if (jit->isFilled(node()))
            gpr();
    }

    ~SpeculateStrictInt52Operand()
    {
        ASSERT(m_gprOrInvalid != InvalidGPRReg);
        m_jit->unlock(m_gprOrInvalid);
    }

    Edge edge() const
    {
        return m_edge;
    }

    Node* node() const
    {
        return edge().node();
    }

    GPRReg gpr()
    {
        if (m_gprOrInvalid == InvalidGPRReg)
            m_gprOrInvalid = m_jit->fillSpeculateInt52(edge(), DataFormatStrictInt52);
        return m_gprOrInvalid;
    }

private:
    SpeculativeJIT* m_jit;
    Edge m_edge;
    GPRReg m_gprOrInvalid;
};
enum OppositeShiftTag { OppositeShift };

class SpeculateWhicheverInt52Operand {
public:
    explicit SpeculateWhicheverInt52Operand(SpeculativeJIT* jit, Edge edge)
        : m_jit(jit)
        , m_edge(edge)
        , m_gprOrInvalid(InvalidGPRReg)
        , m_strict(jit->betterUseStrictInt52(edge))
    {
        RELEASE_ASSERT(edge.useKind() == Int52RepUse);
        if (jit->isFilled(node()))
            gpr();
    }

    explicit SpeculateWhicheverInt52Operand(SpeculativeJIT* jit, Edge edge, const SpeculateWhicheverInt52Operand& other)
        : m_jit(jit)
        , m_edge(edge)
        , m_gprOrInvalid(InvalidGPRReg)
        , m_strict(other.m_strict)
    {
        RELEASE_ASSERT(edge.useKind() == Int52RepUse);
        if (jit->isFilled(node()))
            gpr();
    }

    explicit SpeculateWhicheverInt52Operand(SpeculativeJIT* jit, Edge edge, OppositeShiftTag, const SpeculateWhicheverInt52Operand& other)
        : m_jit(jit)
        , m_edge(edge)
        , m_gprOrInvalid(InvalidGPRReg)
        , m_strict(!other.m_strict)
    {
        RELEASE_ASSERT(edge.useKind() == Int52RepUse);
        if (jit->isFilled(node()))
            gpr();
    }

    ~SpeculateWhicheverInt52Operand()
    {
        ASSERT(m_gprOrInvalid != InvalidGPRReg);
        m_jit->unlock(m_gprOrInvalid);
    }

    Edge edge() const
    {
        return m_edge;
    }

    Node* node() const
    {
        return edge().node();
    }

    GPRReg gpr()
    {
        if (m_gprOrInvalid == InvalidGPRReg) {
            m_gprOrInvalid = m_jit->fillSpeculateInt52(
                edge(), m_strict ? DataFormatStrictInt52 : DataFormatInt52);
        }
        return m_gprOrInvalid;
    }

    DataFormat format() const
    {
        return m_strict ? DataFormatStrictInt52 : DataFormatInt52;
    }

private:
    SpeculativeJIT* m_jit;
    Edge m_edge;
    GPRReg m_gprOrInvalid;
    bool m_strict;
};
class SpeculateDoubleOperand {
public:
    explicit SpeculateDoubleOperand(SpeculativeJIT* jit, Edge edge)
        : m_jit(jit)
        , m_edge(edge)
        , m_fprOrInvalid(InvalidFPRReg)
    {
        RELEASE_ASSERT(isDouble(edge.useKind()));
        if (jit->isFilled(node()))
            fpr();
    }

    ~SpeculateDoubleOperand()
    {
        ASSERT(m_fprOrInvalid != InvalidFPRReg);
        m_jit->unlock(m_fprOrInvalid);
    }

    Edge edge() const
    {
        return m_edge;
    }

    Node* node() const
    {
        return edge().node();
    }

    FPRReg fpr()
    {
        if (m_fprOrInvalid == InvalidFPRReg)
            m_fprOrInvalid = m_jit->fillSpeculateDouble(edge());
        return m_fprOrInvalid;
    }

private:
    SpeculativeJIT* m_jit;
    Edge m_edge;
    FPRReg m_fprOrInvalid;
};
class SpeculateCellOperand {
public:
    explicit SpeculateCellOperand(SpeculativeJIT* jit, Edge edge, OperandSpeculationMode mode = AutomaticOperandSpeculation)
        : m_jit(jit)
        , m_edge(edge)
        , m_gprOrInvalid(InvalidGPRReg)
    {
        ASSERT_UNUSED(mode, mode == ManualOperandSpeculation || isCell(edge.useKind()));
        if (jit->isFilled(node()))
            gpr();
    }

    ~SpeculateCellOperand()
    {
        ASSERT(m_gprOrInvalid != InvalidGPRReg);
        m_jit->unlock(m_gprOrInvalid);
    }

    Edge edge() const
    {
        return m_edge;
    }

    Node* node() const
    {
        return edge().node();
    }

    GPRReg gpr()
    {
        if (m_gprOrInvalid == InvalidGPRReg)
            m_gprOrInvalid = m_jit->fillSpeculateCell(edge());
        return m_gprOrInvalid;
    }

private:
    SpeculativeJIT* m_jit;
    Edge m_edge;
    GPRReg m_gprOrInvalid;
};

class SpeculateBooleanOperand {
public:
    explicit SpeculateBooleanOperand(SpeculativeJIT* jit, Edge edge, OperandSpeculationMode mode = AutomaticOperandSpeculation)
        : m_jit(jit)
        , m_edge(edge)
        , m_gprOrInvalid(InvalidGPRReg)
    {
        ASSERT_UNUSED(mode, mode == ManualOperandSpeculation || edge.useKind() == BooleanUse);
        if (jit->isFilled(node()))
            gpr();
    }

    ~SpeculateBooleanOperand()
    {
        ASSERT(m_gprOrInvalid != InvalidGPRReg);
        m_jit->unlock(m_gprOrInvalid);
    }

    Edge edge() const
    {
        return m_edge;
    }

    Node* node() const
    {
        return edge().node();
    }

    GPRReg gpr()
    {
        if (m_gprOrInvalid == InvalidGPRReg)
            m_gprOrInvalid = m_jit->fillSpeculateBoolean(edge());
        return m_gprOrInvalid;
    }

private:
    SpeculativeJIT* m_jit;
    Edge m_edge;
    GPRReg m_gprOrInvalid;
};
template<typename StructureLocationType>
void SpeculativeJIT::speculateStringObjectForStructure(Edge edge, StructureLocationType structureLocation)
{
    Structure* stringObjectStructure =
        m_jit.globalObjectFor(m_currentNode->origin.semantic)->stringObjectStructure();

    if (!m_state.forNode(edge).m_currentKnownStructure.isSubsetOf(StructureSet(stringObjectStructure))) {
        speculationCheck(
            NotStringObject, JSValueRegs(), 0,
            m_jit.branchStructurePtr(
                JITCompiler::NotEqual, structureLocation, stringObjectStructure));
    }
}
#define DFG_TYPE_CHECK(source, edge, typesPassedThrough, jumpToFail) do { \
        JSValueSource _dtc_source = (source); \
        Edge _dtc_edge = (edge); \
        SpeculatedType _dtc_typesPassedThrough = typesPassedThrough; \
        if (!needsTypeCheck(_dtc_edge, _dtc_typesPassedThrough)) \
            break; \
        typeCheck(_dtc_source, _dtc_edge, _dtc_typesPassedThrough, (jumpToFail)); \
    } while (0)
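
// Illustrative (hypothetical) call site of DFG_TYPE_CHECK from a SpeculativeJIT member function
// on a 64-bit build: filter an edge down to SpecInt32 and bail out through the OSR exit machinery
// if the dynamic check fails.
//
//     DFG_TYPE_CHECK(
//         JSValueRegs(valueGPR), edge, SpecInt32,
//         m_jit.branch64(MacroAssembler::Below, valueGPR, GPRInfo::tagTypeNumberRegister));
//
// Because the macro expands to a do { ... } while (0) block, the check is skipped entirely when
// the abstract interpreter already proves the type (needsTypeCheck() returns false), and the
// statement still composes safely with surrounding if/else code.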
} } // namespace JSC::DFG