/*
 * Copyright (C) 2008, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE64)
#include "JIT.h"

#include "Arguments.h"
#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "Operations.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include "ThunkGenerators.h"
#include <wtf/StringPrintStream.h>

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

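// Completes a call sequence: profiles the call's result value (so the DFG can
// later specialize on the observed result types), then stores it into the dst
// virtual register.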
void JIT::emit_op_call_put_result(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitValueProfilingSite();
    emitPutVirtualRegister(dst);
    if (canBeOptimizedOrInlined())
        killLastResultRegister(); // Make lastResultRegister tracking simpler in the DFG.
}

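// Sets up the outgoing call frame for op_call_varargs. The fast path applies
// only when the spread argument is this code block's own arguments register
// and no arguments object has been materialized (the register still holds the
// empty JSValue): the caller's arguments are then copied directly into the new
// frame. Any failed check (materialized arguments object, too many arguments,
// stack overflow) falls through to the cti_op_load_varargs stub.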
void JIT::compileLoadVarargs(Instruction* instruction)
{
    int thisValue = instruction[2].u.operand;
    int arguments = instruction[3].u.operand;
    int firstFreeRegister = instruction[4].u.operand;

    killLastResultRegister();

    JumpList slowCase;
    JumpList end;
    bool canOptimize = m_codeBlock->usesArguments()
        && arguments == m_codeBlock->argumentsRegister()
        && !m_codeBlock->symbolTable()->slowArguments();

    if (canOptimize) {
        emitGetVirtualRegister(arguments, regT0);
        slowCase.append(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(JSValue()))));

        emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
        slowCase.append(branch32(Above, regT0, TrustedImm32(Arguments::MaxArguments + 1)));
        // regT0: argumentCountIncludingThis

        move(regT0, regT1);
        add32(TrustedImm32(firstFreeRegister + JSStack::CallFrameHeaderSize), regT1);
        lshift32(TrustedImm32(3), regT1); // *= sizeof(Register), i.e. 8 bytes per slot.
        addPtr(callFrameRegister, regT1);
        // regT1: newCallFrame

        slowCase.append(branchPtr(Below, AbsoluteAddress(m_vm->interpreter->stack().addressOfEnd()), regT1));

        // Initialize ArgumentCount.
        store32(regT0, Address(regT1, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));

        // Initialize 'this'.
        emitGetVirtualRegister(thisValue, regT2);
        store64(regT2, Address(regT1, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));

        // Copy arguments.
        neg32(regT0);
        signExtend32ToPtr(regT0, regT0);
        end.append(branchAdd64(Zero, TrustedImm32(1), regT0));
        // regT0: -argumentCount

        Label copyLoop = label();
        load64(BaseIndex(callFrameRegister, regT0, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT2);
        store64(regT2, BaseIndex(regT1, regT0, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));
        branchAdd64(NonZero, TrustedImm32(1), regT0).linkTo(copyLoop, this);

        end.append(jump());
    }

    if (canOptimize)
        slowCase.link(this);

    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(thisValue, regT0);
    stubCall.addArgument(arguments, regT0);
    stubCall.addArgument(Imm32(firstFreeRegister));
    stubCall.call(regT1);

    if (canOptimize)
        end.link(this);
}

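// op_call_eval always calls through the cti_op_call_eval stub. If the stub
// could not handle the call as an eval (it returns the empty JSValue, e.g.
// when the callee is not actually the eval function), the slow case below
// performs an ordinary virtual call instead.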
void JIT::compileCallEval()
{
    JITStubCall stubCall(this, cti_op_call_eval); // Initializes ScopeChain; ReturnPC; CodeBlock.
    stubCall.call();
    addSlowCase(branch64(Equal, regT0, TrustedImm64(JSValue::encode(JSValue()))));
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    sampleCodeBlock(m_codeBlock);
}

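// Slow path for op_call_eval: the stub declined to run the callee as eval, so
// reload the callee from the frame header and dispatch through the virtual
// call thunk.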
void JIT::compileCallEvalSlowCase(Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    emitGetFromCallFrameHeader64(JSStack::Callee, regT0);
    emitNakedCall(m_vm->getCTIStub(virtualCallGenerator).code());

    sampleCodeBlock(m_codeBlock);
}

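// Emits the hot path for op_call, op_call_eval, op_construct, and
// op_call_varargs: materialize the new call frame, then (for non-eval calls)
// emit a patchable compare of the callee against a cached function followed
// by a near call. Both start out unlinked; the slow path's link thunk patches
// them the first time the call executes.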
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[1].u.operand;

    /* Caller always:
        - Updates callFrameRegister to callee callFrame.
        - Initializes ArgumentCount; CallerFrame; Callee.
       For a JS call:
        - Caller initializes ScopeChain.
        - Callee initializes ReturnPC; CodeBlock.
        - Callee restores callFrameRegister before return.
       For a non-JS call:
        - Caller initializes ScopeChain; ReturnPC; CodeBlock.
        - Caller restores callFrameRegister after return.
    */

    if (opcodeID == op_call_varargs)
        compileLoadVarargs(instruction);
    else {
        int argCount = instruction[2].u.operand;
        int registerOffset = instruction[3].u.operand;

        if (opcodeID == op_call && shouldEmitProfiling()) {
            emitGetVirtualRegister(registerOffset + CallFrame::argumentOffsetIncludingThis(0), regT0);
            Jump done = emitJumpIfNotJSCell(regT0);
            loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
            storePtr(regT0, instruction[5].u.arrayProfile->addressOfLastSeenStructure());
            done.link(this);
        }

        addPtr(TrustedImm32(registerOffset * sizeof(Register)), callFrameRegister, regT1);
        store32(TrustedImm32(argCount), Address(regT1, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
    } // regT1 holds newCallFrame with ArgumentCount initialized.

    // Record this call's bytecode offset in the tag half of the caller's ArgumentCount slot.
    store32(TrustedImm32(instruction - m_codeBlock->instructions().begin()), Address(callFrameRegister, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    emitGetVirtualRegister(callee, regT0); // regT0 holds callee.

    store64(callFrameRegister, Address(regT1, JSStack::CallerFrame * static_cast<int>(sizeof(Register))));
    store64(regT0, Address(regT1, JSStack::Callee * static_cast<int>(sizeof(Register))));
    move(regT1, callFrameRegister);

    if (opcodeID == op_call_eval) {
        compileCallEval();
        return;
    }

    DataLabelPtr addressOfLinkedFunctionCheck;
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
    Jump slowCase = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));
    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
    addSlowCase(slowCase);

    ASSERT(m_callStructureStubCompilationInfo.size() == callLinkInfoIndex);
    m_callStructureStubCompilationInfo.append(StructureStubCompilationInfo());
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callType = CallLinkInfo::callTypeFor(opcodeID);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].bytecodeIndex = m_bytecodeOffset;

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT1);
    emitPutToCallFrameHeader(regT1, JSStack::ScopeChain);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    sampleCodeBlock(m_codeBlock);
}

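// Slow path for the patchable callee check above: the callee did not match
// the linked function (or the call has not been linked yet). Call into the
// link thunk, which links the call site to the callee and completes the call.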
void JIT::compileOpCallSlowCase(OpcodeID opcodeID, Instruction*, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex)
{
    if (opcodeID == op_call_eval) {
        compileCallEvalSlowCase(iter);
        return;
    }

    linkSlowCase(iter);

    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_vm->getCTIStub(linkConstructGenerator).code() : m_vm->getCTIStub(linkCallGenerator).code());

    sampleCodeBlock(m_codeBlock);
}

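// Generates a closure call stub for a call site whose callees are distinct
// JSFunction objects sharing one structure and one executable. The stub
// guards on cell-ness, structure, and executable rather than callee identity,
// then jumps to the shared code. The hot path's patchable compare is repatched
// to jump straight to this stub, and the slow path is relinked to the virtual
// call thunk.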
void JIT::privateCompileClosureCall(CallLinkInfo* callLinkInfo, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
{
    JumpList slowCases;

    slowCases.append(branchTestPtr(NonZero, regT0, tagMaskRegister));
    slowCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(expectedStructure)));
    slowCases.append(branchPtr(NotEqual, Address(regT0, JSFunction::offsetOfExecutable()), TrustedImmPtr(expectedExecutable)));

    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT1);
    emitPutToCallFrameHeader(regT1, JSStack::ScopeChain);

    Call call = nearCall();
    Jump done = jump();

    slowCases.link(this);
    move(TrustedImmPtr(callLinkInfo->callReturnLocation.executableAddress()), regT2);
    restoreReturnAddressBeforeReturn(regT2);
    Jump slow = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(virtualCallGenerator).code()));

    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline closure call stub for %s, return point %p, target %p (%s)",
                toCString(*m_codeBlock).data(),
                callLinkInfo->hotPathOther.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(),
                toCString(pointerDump(calleeCodeBlock)).data())),
        *m_vm, m_codeBlock->ownerExecutable(), expectedStructure, expectedExecutable,
        callLinkInfo->codeOrigin));

    RepatchBuffer repatchBuffer(m_codeBlock);

    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo->hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(virtualCallGenerator).code());

    callLinkInfo->stub = stubRoutine.release();
}

} // namespace JSC

#endif // USE(JSVALUE64)
#endif // ENABLE(JIT)