/*
 * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "FTLSlowPathCall.h"

#if ENABLE(FTL_JIT)

#include "CCallHelpers.h"
#include "JSCInlines.h"

#include <algorithm>
36 namespace JSC
{ namespace FTL
{
// This code relies on us being 64-bit. FTL is currently always 64-bit.
// Stack slots, spilled registers, and argument slots are all one word (8 bytes) each.
static const size_t wordSize = 8;
43 // This will be an RAII thingy that will set up the necessary stack sizes and offsets and such.
47 State
& state
, const RegisterSet
& usedRegisters
, CCallHelpers
& jit
,
48 unsigned numArgs
, GPRReg returnRegister
)
50 , m_usedRegisters(usedRegisters
)
53 , m_returnRegister(returnRegister
)
55 // We don't care that you're using callee-save, stack, or hardware registers.
56 m_usedRegisters
.exclude(RegisterSet::stackRegisters());
57 m_usedRegisters
.exclude(RegisterSet::reservedHardwareRegisters());
58 m_usedRegisters
.exclude(RegisterSet::calleeSaveRegisters());
60 // The return register doesn't need to be saved.
61 if (m_returnRegister
!= InvalidGPRReg
)
62 m_usedRegisters
.clear(m_returnRegister
);
64 size_t stackBytesNeededForReturnAddress
= wordSize
;
66 m_offsetToSavingArea
=
67 (std::max(m_numArgs
, NUMBER_OF_ARGUMENT_REGISTERS
) - NUMBER_OF_ARGUMENT_REGISTERS
) * wordSize
;
69 for (unsigned i
= std::min(NUMBER_OF_ARGUMENT_REGISTERS
, numArgs
); i
--;)
70 m_argumentRegisters
.set(GPRInfo::toArgumentRegister(i
));
71 m_callingConventionRegisters
.merge(m_argumentRegisters
);
72 if (returnRegister
!= InvalidGPRReg
)
73 m_callingConventionRegisters
.set(GPRInfo::returnValueGPR
);
74 m_callingConventionRegisters
.filter(m_usedRegisters
);
76 unsigned numberOfCallingConventionRegisters
=
77 m_callingConventionRegisters
.numberOfSetRegisters();
79 size_t offsetToThunkSavingArea
=
80 m_offsetToSavingArea
+
81 numberOfCallingConventionRegisters
* wordSize
;
84 offsetToThunkSavingArea
+
85 stackBytesNeededForReturnAddress
+
86 (m_usedRegisters
.numberOfSetRegisters() - numberOfCallingConventionRegisters
) * wordSize
;
88 m_stackBytesNeeded
= (m_stackBytesNeeded
+ stackAlignmentBytes() - 1) & ~(stackAlignmentBytes() - 1);
90 m_jit
.subPtr(CCallHelpers::TrustedImm32(m_stackBytesNeeded
), CCallHelpers::stackPointerRegister
);
92 m_thunkSaveSet
= m_usedRegisters
;
94 // This relies on all calling convention registers also being temp registers.
95 unsigned stackIndex
= 0;
96 for (unsigned i
= GPRInfo::numberOfRegisters
; i
--;) {
97 GPRReg reg
= GPRInfo::toRegister(i
);
98 if (!m_callingConventionRegisters
.get(reg
))
100 m_jit
.storePtr(reg
, CCallHelpers::Address(CCallHelpers::stackPointerRegister
, m_offsetToSavingArea
+ (stackIndex
++) * wordSize
));
101 m_thunkSaveSet
.clear(reg
);
104 m_offset
= offsetToThunkSavingArea
;
109 if (m_returnRegister
!= InvalidGPRReg
)
110 m_jit
.move(GPRInfo::returnValueGPR
, m_returnRegister
);
112 unsigned stackIndex
= 0;
113 for (unsigned i
= GPRInfo::numberOfRegisters
; i
--;) {
114 GPRReg reg
= GPRInfo::toRegister(i
);
115 if (!m_callingConventionRegisters
.get(reg
))
117 m_jit
.loadPtr(CCallHelpers::Address(CCallHelpers::stackPointerRegister
, m_offsetToSavingArea
+ (stackIndex
++) * wordSize
), reg
);
120 m_jit
.addPtr(CCallHelpers::TrustedImm32(m_stackBytesNeeded
), CCallHelpers::stackPointerRegister
);
123 RegisterSet
usedRegisters() const
125 return m_thunkSaveSet
;
128 ptrdiff_t offset() const
133 SlowPathCallKey
keyWithTarget(void* callTarget
) const
135 return SlowPathCallKey(usedRegisters(), callTarget
, m_argumentRegisters
, offset());
138 MacroAssembler::Call
makeCall(void* callTarget
, MacroAssembler::JumpList
* exceptionTarget
)
140 MacroAssembler::Call result
= m_jit
.call();
141 m_state
.finalizer
->slowPathCalls
.append(SlowPathCall(
142 result
, keyWithTarget(callTarget
)));
144 exceptionTarget
->append(m_jit
.emitExceptionCheck());
150 RegisterSet m_usedRegisters
;
151 RegisterSet m_argumentRegisters
;
152 RegisterSet m_callingConventionRegisters
;
155 GPRReg m_returnRegister
;
156 size_t m_offsetToSavingArea
;
157 size_t m_stackBytesNeeded
;
158 RegisterSet m_thunkSaveSet
;
162 } // anonymous namespace
164 void storeCodeOrigin(State
& state
, CCallHelpers
& jit
, CodeOrigin codeOrigin
)
166 if (!codeOrigin
.isSet())
169 unsigned index
= state
.jitCode
->common
.addCodeOrigin(codeOrigin
);
170 unsigned locationBits
= CallFrame::Location::encodeAsCodeOriginIndex(index
);
172 CCallHelpers::TrustedImm32(locationBits
),
173 CCallHelpers::tagFor(static_cast<VirtualRegister
>(JSStack::ArgumentCount
)));
176 MacroAssembler::Call
callOperation(
177 State
& state
, const RegisterSet
& usedRegisters
, CCallHelpers
& jit
,
178 CodeOrigin codeOrigin
, MacroAssembler::JumpList
* exceptionTarget
,
179 J_JITOperation_ESsiJI operation
, GPRReg result
, StructureStubInfo
* stubInfo
,
180 GPRReg object
, StringImpl
* uid
)
182 storeCodeOrigin(state
, jit
, codeOrigin
);
183 CallContext
context(state
, usedRegisters
, jit
, 4, result
);
184 jit
.setupArgumentsWithExecState(
185 CCallHelpers::TrustedImmPtr(stubInfo
), object
,
186 CCallHelpers::TrustedImmPtr(uid
));
187 return context
.makeCall(bitwise_cast
<void*>(operation
), exceptionTarget
);
190 MacroAssembler::Call
callOperation(
191 State
& state
, const RegisterSet
& usedRegisters
, CCallHelpers
& jit
,
192 CodeOrigin codeOrigin
, MacroAssembler::JumpList
* exceptionTarget
,
193 V_JITOperation_ESsiJJI operation
, StructureStubInfo
* stubInfo
, GPRReg value
,
194 GPRReg object
, StringImpl
* uid
)
196 storeCodeOrigin(state
, jit
, codeOrigin
);
197 CallContext
context(state
, usedRegisters
, jit
, 5, InvalidGPRReg
);
198 jit
.setupArgumentsWithExecState(
199 CCallHelpers::TrustedImmPtr(stubInfo
), value
, object
,
200 CCallHelpers::TrustedImmPtr(uid
));
201 return context
.makeCall(bitwise_cast
<void*>(operation
), exceptionTarget
);
204 } } // namespace JSC::FTL
206 #endif // ENABLE(FTL_JIT)