/*
 * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "RegisterPreservationWrapperGenerator.h"

#if ENABLE(JIT)

#include "AssemblyHelpers.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "StackAlignment.h"
38 RegisterSet
registersToPreserve()
40 RegisterSet calleeSaves
= RegisterSet::calleeSaveRegisters();
42 // No need to preserve FP since that always gets preserved anyway.
43 calleeSaves
.clear(GPRInfo::callFrameRegister
);
48 ptrdiff_t registerPreservationOffset()
50 unsigned numberOfCalleeSaves
= registersToPreserve().numberOfSetRegisters();
52 // Need to preserve the old return PC.
53 unsigned numberOfValuesToSave
= numberOfCalleeSaves
+ 1;
55 // Alignment. Preserve the same alignment invariants that the caller imposed.
56 unsigned numberOfNewStackSlots
=
57 WTF::roundUpToMultipleOf(stackAlignmentRegisters(), numberOfValuesToSave
);
59 return sizeof(Register
) * numberOfNewStackSlots
;
62 MacroAssemblerCodeRef
generateRegisterPreservationWrapper(VM
& vm
, ExecutableBase
* executable
, MacroAssemblerCodePtr target
)
65 // We shouldn't ever be generating wrappers for native functions.
66 RegisterSet toSave
= registersToPreserve();
67 ptrdiff_t offset
= registerPreservationOffset();
69 AssemblyHelpers
jit(&vm
, 0);
71 jit
.preserveReturnAddressAfterCall(GPRInfo::regT1
);
73 AssemblyHelpers::Address(
74 AssemblyHelpers::stackPointerRegister
,
75 (JSStack::ArgumentCount
- JSStack::CallerFrameAndPCSize
) * sizeof(Register
) + PayloadOffset
),
78 // Place the stack pointer where we want it to be.
79 jit
.subPtr(AssemblyHelpers::TrustedImm32(offset
), AssemblyHelpers::stackPointerRegister
);
81 // Compute the number of things we will be copying.
83 AssemblyHelpers::TrustedImm32(
84 JSStack::CallFrameHeaderSize
- JSStack::CallerFrameAndPCSize
),
87 ASSERT(!toSave
.get(GPRInfo::regT4
));
88 jit
.move(AssemblyHelpers::stackPointerRegister
, GPRInfo::regT4
);
90 AssemblyHelpers::Label loop
= jit
.label();
91 jit
.sub32(AssemblyHelpers::TrustedImm32(1), GPRInfo::regT2
);
92 jit
.load64(AssemblyHelpers::Address(GPRInfo::regT4
, offset
), GPRInfo::regT0
);
93 jit
.store64(GPRInfo::regT0
, GPRInfo::regT4
);
94 jit
.addPtr(AssemblyHelpers::TrustedImm32(sizeof(Register
)), GPRInfo::regT4
);
95 jit
.branchTest32(AssemblyHelpers::NonZero
, GPRInfo::regT2
).linkTo(loop
, &jit
);
97 // At this point regT4 + offset points to where we save things.
98 ptrdiff_t currentOffset
= 0;
99 jit
.storePtr(GPRInfo::regT1
, AssemblyHelpers::Address(GPRInfo::regT4
, currentOffset
));
101 for (GPRReg gpr
= AssemblyHelpers::firstRegister(); gpr
<= AssemblyHelpers::lastRegister(); gpr
= static_cast<GPRReg
>(gpr
+ 1)) {
102 if (!toSave
.get(gpr
))
104 currentOffset
+= sizeof(Register
);
105 jit
.store64(gpr
, AssemblyHelpers::Address(GPRInfo::regT4
, currentOffset
));
107 for (FPRReg fpr
= AssemblyHelpers::firstFPRegister(); fpr
<= AssemblyHelpers::lastFPRegister(); fpr
= static_cast<FPRReg
>(fpr
+ 1)) {
108 if (!toSave
.get(fpr
))
110 currentOffset
+= sizeof(Register
);
111 jit
.storeDouble(fpr
, AssemblyHelpers::Address(GPRInfo::regT4
, currentOffset
));
114 // Assume that there aren't any saved FP registers.
116 // Restore the tag registers.
117 jit
.move(AssemblyHelpers::TrustedImm64(TagTypeNumber
), GPRInfo::tagTypeNumberRegister
);
118 jit
.add64(AssemblyHelpers::TrustedImm32(TagMask
- TagTypeNumber
), GPRInfo::tagTypeNumberRegister
, GPRInfo::tagMaskRegister
);
121 AssemblyHelpers::TrustedImmPtr(
122 vm
.getCTIStub(registerRestorationThunkGenerator
).code().executableAddress()),
123 GPRInfo::nonArgGPR0
);
124 jit
.restoreReturnAddressBeforeReturn(GPRInfo::nonArgGPR0
);
125 AssemblyHelpers::Jump jump
= jit
.jump();
127 LinkBuffer
linkBuffer(vm
, jit
, GLOBAL_THUNK_ID
);
128 linkBuffer
.link(jump
, CodeLocationLabel(target
));
130 if (Options::verboseFTLToJSThunk())
131 dataLog("Need a thunk for calls from FTL to non-FTL version of ", *executable
, "\n");
133 return FINALIZE_DFG_CODE(linkBuffer
, ("Register preservation wrapper for %s/%s, %p", toCString(executable
->hashFor(CodeForCall
)).data(), toCString(executable
->hashFor(CodeForConstruct
)).data(), target
.executableAddress()));
134 #else // ENABLE(FTL_JIT)
136 UNUSED_PARAM(executable
);
137 UNUSED_PARAM(target
);
138 // We don't support non-FTL builds for two reasons:
139 // - It just so happens that currently only the FTL bottoms out in this code.
140 // - The code above uses 64-bit instructions. It doesn't necessarily have to; it would be
141 // easy to change it so that it doesn't. But obviously making that change would be a
142 // prerequisite to removing this #if.
143 UNREACHABLE_FOR_PLATFORM();
144 return MacroAssemblerCodeRef();
145 #endif // ENABLE(FTL_JIT)
148 static void generateRegisterRestoration(AssemblyHelpers
& jit
)
151 RegisterSet toSave
= registersToPreserve();
152 ptrdiff_t offset
= registerPreservationOffset();
154 ASSERT(!toSave
.get(GPRInfo::regT4
));
156 // We need to place the stack pointer back to where the caller thought they left it.
157 // But also, in order to recover the registers, we need to figure out how big the
158 // arguments area is.
161 AssemblyHelpers::Address(
162 AssemblyHelpers::stackPointerRegister
,
163 (JSStack::ArgumentCount
- JSStack::CallerFrameAndPCSize
) * sizeof(Register
) + PayloadOffset
),
166 jit
.move(GPRInfo::regT4
, GPRInfo::regT2
);
167 jit
.lshift32(AssemblyHelpers::TrustedImm32(3), GPRInfo::regT2
);
169 jit
.addPtr(AssemblyHelpers::TrustedImm32(offset
), AssemblyHelpers::stackPointerRegister
);
170 jit
.addPtr(AssemblyHelpers::stackPointerRegister
, GPRInfo::regT2
);
172 // We saved things at:
174 // adjSP + (JSStack::CallFrameHeaderSize - JSStack::CallerFrameAndPCSize + NumArgs) * 8
178 // adjSP = origSP - offset
180 // regT2 now points at:
182 // origSP + NumArgs * 8
183 // = adjSP + offset + NumArgs * 8
185 // So if we subtract offset and then add JSStack::CallFrameHeaderSize and subtract
186 // JSStack::CallerFrameAndPCSize, we'll get the thing we want.
187 ptrdiff_t currentOffset
= -offset
+ sizeof(Register
) * (
188 JSStack::CallFrameHeaderSize
- JSStack::CallerFrameAndPCSize
);
189 jit
.loadPtr(AssemblyHelpers::Address(GPRInfo::regT2
, currentOffset
), GPRInfo::regT1
);
191 for (GPRReg gpr
= AssemblyHelpers::firstRegister(); gpr
<= AssemblyHelpers::lastRegister(); gpr
= static_cast<GPRReg
>(gpr
+ 1)) {
192 if (!toSave
.get(gpr
))
194 currentOffset
+= sizeof(Register
);
195 jit
.load64(AssemblyHelpers::Address(GPRInfo::regT2
, currentOffset
), gpr
);
197 for (FPRReg fpr
= AssemblyHelpers::firstFPRegister(); fpr
<= AssemblyHelpers::lastFPRegister(); fpr
= static_cast<FPRReg
>(fpr
+ 1)) {
198 if (!toSave
.get(fpr
))
200 currentOffset
+= sizeof(Register
);
201 jit
.loadDouble(AssemblyHelpers::Address(GPRInfo::regT2
, currentOffset
), fpr
);
204 // Thunks like this rely on the ArgumentCount being intact. Pay it forward.
207 AssemblyHelpers::Address(
208 AssemblyHelpers::stackPointerRegister
,
209 (JSStack::ArgumentCount
- JSStack::CallerFrameAndPCSize
) * sizeof(Register
) + PayloadOffset
));
211 if (!ASSERT_DISABLED
) {
212 AssemblyHelpers::Jump ok
= jit
.branchPtr(
213 AssemblyHelpers::Above
, GPRInfo::regT1
, AssemblyHelpers::TrustedImmPtr(static_cast<size_t>(0x1000)));
214 jit
.abortWithReason(RPWUnreasonableJumpTarget
);
218 jit
.jump(GPRInfo::regT1
);
219 #else // ENABLE(FTL_JIT)
221 UNREACHABLE_FOR_PLATFORM();
222 #endif // ENABLE(FTL_JIT)
225 MacroAssemblerCodeRef
registerRestorationThunkGenerator(VM
* vm
)
227 AssemblyHelpers
jit(vm
, 0);
228 generateRegisterRestoration(jit
);
229 LinkBuffer
linkBuffer(*vm
, jit
, GLOBAL_THUNK_ID
);
230 return FINALIZE_CODE(linkBuffer
, ("Register restoration thunk"));
235 #endif // ENABLE(JIT)