]> git.saurik.com Git - apple/javascriptcore.git/blob - ftl/FTLJSCallVarargs.cpp
JavaScriptCore-7601.1.46.3.tar.gz
[apple/javascriptcore.git] / ftl / FTLJSCallVarargs.cpp
1 /*
2 * Copyright (C) 2015 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #include "config.h"
27 #include "FTLJSCallVarargs.h"
28
29 #if ENABLE(FTL_JIT)
30
31 #include "DFGNode.h"
32 #include "DFGOperations.h"
33 #include "JSCInlines.h"
34 #include "LinkBuffer.h"
35 #include "ScratchRegisterAllocator.h"
36 #include "SetupVarargsFrame.h"
37
38 namespace JSC { namespace FTL {
39
40 using namespace DFG;
41
42 JSCallVarargs::JSCallVarargs()
43 : m_stackmapID(UINT_MAX)
44 , m_node(nullptr)
45 , m_instructionOffset(UINT_MAX)
46 {
47 }
48
// Associates this call record with the patchpoint's stackmap ID and the DFG
// node that originated it, and sets up the underlying call machinery with the
// appropriate varargs flavor (construct vs. call) derived from the node's op.
JSCallVarargs::JSCallVarargs(unsigned stackmapID, Node* node)
    : m_stackmapID(stackmapID)
    , m_node(node)
    , m_callBase(
        // Construct ops get ConstructVarargs link info; the two call ops get CallVarargs.
        (node->op() == ConstructVarargs || node->op() == ConstructForwardVarargs)
        ? CallLinkInfo::ConstructVarargs : CallLinkInfo::CallVarargs,
        node->origin.semantic)
    , m_instructionOffset(0)
{
    // Only the four varargs opcodes are legal here; anything else indicates a
    // caller bug (and would have picked a meaningless CallLinkInfo type above).
    ASSERT(
        node->op() == CallVarargs || node->op() == CallForwardVarargs
        || node->op() == ConstructVarargs || node->op() == ConstructForwardVarargs);
}
62
63 unsigned JSCallVarargs::numSpillSlotsNeeded()
64 {
65 return 4;
66 }
67
68 void JSCallVarargs::emit(CCallHelpers& jit, int32_t spillSlotsOffset)
69 {
70 // We are passed three pieces of information:
71 // - The callee.
72 // - The arguments object, if it's not a forwarding call.
73 // - The "this" value, if it's a constructor call.
74
75 CallVarargsData* data = m_node->callVarargsData();
76
77 GPRReg calleeGPR = GPRInfo::argumentGPR0;
78
79 GPRReg argumentsGPR = InvalidGPRReg;
80 GPRReg thisGPR = InvalidGPRReg;
81
82 bool forwarding = false;
83
84 switch (m_node->op()) {
85 case CallVarargs:
86 case ConstructVarargs:
87 argumentsGPR = GPRInfo::argumentGPR1;
88 thisGPR = GPRInfo::argumentGPR2;
89 break;
90 case CallForwardVarargs:
91 case ConstructForwardVarargs:
92 thisGPR = GPRInfo::argumentGPR1;
93 forwarding = true;
94 break;
95 default:
96 RELEASE_ASSERT_NOT_REACHED();
97 break;
98 }
99
100 const unsigned calleeSpillSlot = 0;
101 const unsigned argumentsSpillSlot = 1;
102 const unsigned thisSpillSlot = 2;
103 const unsigned stackPointerSpillSlot = 3;
104
105 // Get some scratch registers.
106 RegisterSet usedRegisters;
107 usedRegisters.merge(RegisterSet::stackRegisters());
108 usedRegisters.merge(RegisterSet::reservedHardwareRegisters());
109 usedRegisters.merge(RegisterSet::calleeSaveRegisters());
110 usedRegisters.set(calleeGPR);
111 if (argumentsGPR != InvalidGPRReg)
112 usedRegisters.set(argumentsGPR);
113 ASSERT(thisGPR);
114 usedRegisters.set(thisGPR);
115 ScratchRegisterAllocator allocator(usedRegisters);
116 GPRReg scratchGPR1 = allocator.allocateScratchGPR();
117 GPRReg scratchGPR2 = allocator.allocateScratchGPR();
118 GPRReg scratchGPR3 = allocator.allocateScratchGPR();
119
120 RELEASE_ASSERT(!allocator.numberOfReusedRegisters());
121
122 auto computeUsedStack = [&] (GPRReg targetGPR, unsigned extra) {
123 if (isARM64()) {
124 // Have to do this the weird way because $sp on ARM64 means zero when used in a subtraction.
125 jit.move(CCallHelpers::stackPointerRegister, targetGPR);
126 jit.negPtr(targetGPR);
127 jit.addPtr(GPRInfo::callFrameRegister, targetGPR);
128 } else {
129 jit.move(GPRInfo::callFrameRegister, targetGPR);
130 jit.subPtr(CCallHelpers::stackPointerRegister, targetGPR);
131 }
132 if (extra)
133 jit.subPtr(CCallHelpers::TrustedImm32(extra), targetGPR);
134 jit.urshiftPtr(CCallHelpers::Imm32(3), targetGPR);
135 };
136
137 auto callWithExceptionCheck = [&] (void* callee) {
138 jit.move(CCallHelpers::TrustedImmPtr(callee), GPRInfo::nonPreservedNonArgumentGPR);
139 jit.call(GPRInfo::nonPreservedNonArgumentGPR);
140 m_exceptions.append(jit.emitExceptionCheck(AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
141 };
142
143 if (isARM64()) {
144 jit.move(CCallHelpers::stackPointerRegister, scratchGPR1);
145 jit.storePtr(scratchGPR1, CCallHelpers::addressFor(spillSlotsOffset + stackPointerSpillSlot));
146 } else
147 jit.storePtr(CCallHelpers::stackPointerRegister, CCallHelpers::addressFor(spillSlotsOffset + stackPointerSpillSlot));
148
149 unsigned extraStack = sizeof(CallerFrameAndPC) +
150 WTF::roundUpToMultipleOf(stackAlignmentBytes(), 5 * sizeof(void*));
151
152 if (forwarding) {
153 CCallHelpers::JumpList slowCase;
154 computeUsedStack(scratchGPR2, 0);
155 emitSetupVarargsFrameFastCase(jit, scratchGPR2, scratchGPR1, scratchGPR2, scratchGPR3, m_node->child2()->origin.semantic.inlineCallFrame, data->firstVarArgOffset, slowCase);
156
157 CCallHelpers::Jump done = jit.jump();
158 slowCase.link(&jit);
159 jit.subPtr(CCallHelpers::TrustedImm32(extraStack), CCallHelpers::stackPointerRegister);
160 jit.setupArgumentsExecState();
161 callWithExceptionCheck(bitwise_cast<void*>(operationThrowStackOverflowForVarargs));
162 jit.abortWithReason(DFGVarargsThrowingPathDidNotThrow);
163
164 done.link(&jit);
165 jit.move(calleeGPR, GPRInfo::regT0);
166 } else {
167 // Gotta spill the callee, arguments, and this because we will need them later and we will have some
168 // calls that clobber them.
169 jit.store64(calleeGPR, CCallHelpers::addressFor(spillSlotsOffset + calleeSpillSlot));
170 jit.store64(argumentsGPR, CCallHelpers::addressFor(spillSlotsOffset + argumentsSpillSlot));
171 jit.store64(thisGPR, CCallHelpers::addressFor(spillSlotsOffset + thisSpillSlot));
172
173 computeUsedStack(scratchGPR1, 0);
174 jit.subPtr(CCallHelpers::TrustedImm32(extraStack), CCallHelpers::stackPointerRegister);
175 jit.setupArgumentsWithExecState(argumentsGPR, scratchGPR1, CCallHelpers::TrustedImm32(data->firstVarArgOffset));
176 callWithExceptionCheck(bitwise_cast<void*>(operationSizeFrameForVarargs));
177
178 jit.move(GPRInfo::returnValueGPR, scratchGPR1);
179 computeUsedStack(scratchGPR2, extraStack);
180 jit.load64(CCallHelpers::addressFor(spillSlotsOffset + argumentsSpillSlot), argumentsGPR);
181 emitSetVarargsFrame(jit, scratchGPR1, false, scratchGPR2, scratchGPR2);
182 jit.addPtr(CCallHelpers::TrustedImm32(-extraStack), scratchGPR2, CCallHelpers::stackPointerRegister);
183 jit.setupArgumentsWithExecState(scratchGPR2, argumentsGPR, CCallHelpers::TrustedImm32(data->firstVarArgOffset), scratchGPR1);
184 callWithExceptionCheck(bitwise_cast<void*>(operationSetupVarargsFrame));
185
186 jit.move(GPRInfo::returnValueGPR, scratchGPR2);
187
188 jit.load64(CCallHelpers::addressFor(spillSlotsOffset + thisSpillSlot), thisGPR);
189 jit.load64(CCallHelpers::addressFor(spillSlotsOffset + calleeSpillSlot), GPRInfo::regT0);
190 }
191
192 jit.addPtr(CCallHelpers::TrustedImm32(sizeof(CallerFrameAndPC)), scratchGPR2, CCallHelpers::stackPointerRegister);
193
194 jit.store64(thisGPR, CCallHelpers::calleeArgumentSlot(0));
195
196 // Henceforth we make the call. The base FTL call machinery expects the callee in regT0 and for the
197 // stack frame to already be set up, which it is.
198 jit.store64(GPRInfo::regT0, CCallHelpers::calleeFrameSlot(JSStack::Callee));
199
200 m_callBase.emit(jit);
201
202 // Undo the damage we've done.
203 if (isARM64()) {
204 GPRReg scratchGPRAtReturn = CCallHelpers::selectScratchGPR(GPRInfo::returnValueGPR);
205 jit.loadPtr(CCallHelpers::addressFor(spillSlotsOffset + stackPointerSpillSlot), scratchGPRAtReturn);
206 jit.move(scratchGPRAtReturn, CCallHelpers::stackPointerRegister);
207 } else
208 jit.loadPtr(CCallHelpers::addressFor(spillSlotsOffset + stackPointerSpillSlot), CCallHelpers::stackPointerRegister);
209 }
210
// Finalizes the emitted code: links the underlying call machinery, then
// points every exception-check jump recorded during emit() at the given
// exception handler.
void JSCallVarargs::link(VM& vm, LinkBuffer& linkBuffer, CodeLocationLabel exceptionHandler)
{
    m_callBase.link(vm, linkBuffer);
    linkBuffer.link(m_exceptions, exceptionHandler);
}
216
217 } } // namespace JSC::FTL
218
219 #endif // ENABLE(FTL_JIT)
220