/*
 * Copyright (C) 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "FTLThunks.h"

#if ENABLE(FTL_JIT)

#include "AssemblyHelpers.h"
#include "FPRInfo.h"
#include "FTLOSRExitCompiler.h"
#include "FTLSaveRestore.h"
#include "GPRInfo.h"
#include "LinkBuffer.h"

namespace JSC { namespace FTL {
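
// Every FTL OSR exit funnels through the single thunk generated here: it saves the
// entire machine state to a scratch buffer, calls compileFTLOSRExit() to lazily
// compile the exit, then restores all registers and tail-calls the generated code.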
MacroAssemblerCodeRef osrExitGenerationThunkGenerator(VM* vm)
{
    AssemblyHelpers jit(vm, 0);

    // Note that the "return address" will be the OSR exit ID.

    ptrdiff_t stackMisalignment = MacroAssembler::pushToSaveByteOffset();

    // Pretend that we're a C call frame.
    jit.pushToSave(MacroAssembler::framePointerRegister);
    jit.move(MacroAssembler::stackPointerRegister, MacroAssembler::framePointerRegister);
    stackMisalignment += MacroAssembler::pushToSaveByteOffset();

    // Now create ourselves enough stack space to give saveAllRegisters() a scratch slot.
    unsigned numberOfRequiredPops = 0;
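    // Push at least once (the scratch slot) and keep pushing until the total
    // adjustment leaves the stack aligned for the C call below.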
    do {
        jit.pushToSave(GPRInfo::regT0);
        stackMisalignment += MacroAssembler::pushToSaveByteOffset();
        numberOfRequiredPops++;
    } while (stackMisalignment % stackAlignmentBytes());

    ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(requiredScratchMemorySizeInBytes());
    char* buffer = static_cast<char*>(scratchBuffer->dataBuffer());

    saveAllRegisters(jit, buffer);

    // Tell GC mark phase how much of the scratch buffer is active during call.
    jit.move(MacroAssembler::TrustedImmPtr(scratchBuffer->activeLengthPtr()), GPRInfo::nonArgGPR1);
    jit.storePtr(MacroAssembler::TrustedImmPtr(requiredScratchMemorySizeInBytes()), GPRInfo::nonArgGPR1);

    jit.loadPtr(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
    jit.peek(
        GPRInfo::argumentGPR1,
        (stackMisalignment - MacroAssembler::pushToSaveByteOffset()) / sizeof(void*));
    MacroAssembler::Call functionCall = jit.call();

    // At this point we want to make a tail call to what was returned to us in the
    // returnValueGPR. But at the same time as we do this, we must restore all registers.
    // The way we will accomplish this is by arranging to have the tail call target in the
    // return address "slot" (be it a register or the stack).

    jit.move(GPRInfo::returnValueGPR, GPRInfo::regT0);

    // Make sure we tell the GC that we're not using the scratch buffer anymore.
    jit.move(MacroAssembler::TrustedImmPtr(scratchBuffer->activeLengthPtr()), GPRInfo::regT1);
    jit.storePtr(MacroAssembler::TrustedImmPtr(0), GPRInfo::regT1);

    // Prepare for tail call.
    while (numberOfRequiredPops--)
        jit.popToRestore(GPRInfo::regT1);
    jit.popToRestore(MacroAssembler::framePointerRegister);

    // At this point we're sitting on the return address - so if we did a jump right now, the
    // tail-callee would be happy. Instead we'll stash the callee in the return address and then
    // restore all registers.

    jit.restoreReturnAddressBeforeReturn(GPRInfo::regT0);

    restoreAllRegisters(jit, buffer);

    jit.ret();

    LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
    patchBuffer.link(functionCall, compileFTLOSRExit);
    return FINALIZE_CODE(patchBuffer, ("FTL OSR exit generation thunk"));
}
static void registerClobberCheck(AssemblyHelpers& jit, RegisterSet dontClobber)
{
    if (!Options::clobberAllRegsInFTLICSlowPath())
        return;

    RegisterSet clobber = RegisterSet::allRegisters();
    clobber.exclude(RegisterSet::reservedHardwareRegisters());
    clobber.exclude(RegisterSet::stackRegisters());
    clobber.exclude(RegisterSet::calleeSaveRegisters());
    clobber.exclude(dontClobber);

    GPRReg someGPR;
    for (Reg reg = Reg::first(); reg <= Reg::last(); reg = reg.next()) {
        if (!clobber.get(reg) || !reg.isGPR())
            continue;

        jit.move(AssemblyHelpers::TrustedImm32(0x1337beef), reg.gpr());
        someGPR = reg.gpr();
    }

    for (Reg reg = Reg::first(); reg <= Reg::last(); reg = reg.next()) {
        if (!clobber.get(reg) || !reg.isFPR())
            continue;

        jit.move64ToDouble(someGPR, reg.fpr());
    }
}
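
// Builds the thunk for a slow path call made from an FTL inline cache. The
// SlowPathCallKey describes the call target, the registers that are live at the call
// site, and the stack offset of the area reserved for spilling them.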
MacroAssemblerCodeRef slowPathCallThunkGenerator(VM& vm, const SlowPathCallKey& key)
{
    AssemblyHelpers jit(&vm, 0);

    // We want to save the given registers at the given offset, then we want to save the
    // old return address somewhere past that offset, and then finally we want to make the
    // call.

    size_t currentOffset = key.offset() + sizeof(void*);
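    // Concretely: the old return address is stashed at key.offset() itself, and the
    // used registers are spilled at ascending offsets just above it.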

#if CPU(X86) || CPU(X86_64)
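    // The extra word accounts for the return address that the call into this thunk
    // pushed onto the stack on x86.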
    currentOffset += sizeof(void*);
#endif

    for (MacroAssembler::RegisterID reg = MacroAssembler::firstRegister(); reg <= MacroAssembler::lastRegister(); reg = static_cast<MacroAssembler::RegisterID>(reg + 1)) {
        if (!key.usedRegisters().get(reg))
            continue;
        jit.storePtr(reg, AssemblyHelpers::Address(MacroAssembler::stackPointerRegister, currentOffset));
        currentOffset += sizeof(void*);
    }

    for (MacroAssembler::FPRegisterID reg = MacroAssembler::firstFPRegister(); reg <= MacroAssembler::lastFPRegister(); reg = static_cast<MacroAssembler::FPRegisterID>(reg + 1)) {
        if (!key.usedRegisters().get(reg))
            continue;
        jit.storeDouble(reg, AssemblyHelpers::Address(MacroAssembler::stackPointerRegister, currentOffset));
        currentOffset += sizeof(double);
    }

    jit.preserveReturnAddressAfterCall(GPRInfo::nonArgGPR0);
    jit.storePtr(GPRInfo::nonArgGPR0, AssemblyHelpers::Address(MacroAssembler::stackPointerRegister, key.offset()));

    registerClobberCheck(jit, key.argumentRegisters());

    AssemblyHelpers::Call call = jit.call();

    jit.loadPtr(AssemblyHelpers::Address(MacroAssembler::stackPointerRegister, key.offset()), GPRInfo::nonPreservedNonReturnGPR);
    jit.restoreReturnAddressBeforeReturn(GPRInfo::nonPreservedNonReturnGPR);
    for (MacroAssembler::FPRegisterID reg = MacroAssembler::lastFPRegister(); ; reg = static_cast<MacroAssembler::FPRegisterID>(reg - 1)) {
        if (key.usedRegisters().get(reg)) {
            currentOffset -= sizeof(double);
            jit.loadDouble(AssemblyHelpers::Address(MacroAssembler::stackPointerRegister, currentOffset), reg);
        }
        if (reg == MacroAssembler::firstFPRegister())
            break;
    }

    for (MacroAssembler::RegisterID reg = MacroAssembler::lastRegister(); ; reg = static_cast<MacroAssembler::RegisterID>(reg - 1)) {
        if (key.usedRegisters().get(reg)) {
            currentOffset -= sizeof(void*);
            jit.loadPtr(AssemblyHelpers::Address(MacroAssembler::stackPointerRegister, currentOffset), reg);
        }
        if (reg == MacroAssembler::firstRegister())
            break;
    }

    jit.ret();

    LinkBuffer patchBuffer(vm, jit, GLOBAL_THUNK_ID);
    patchBuffer.link(call, FunctionPtr(key.callTarget()));
    return FINALIZE_CODE(patchBuffer, ("FTL slow path call thunk for %s", toCString(key).data()));
}

} } // namespace JSC::FTL

#endif // ENABLE(FTL_JIT)