apple/javascriptcore.git / jit / JITCall32_64.cpp (JavaScriptCore-1097.13)
/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "Arguments.h"
#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

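// op_call_put_result runs immediately after a call returns. On JSVALUE32_64 the returned
// JSValue is split across regT1 (tag) and regT0 (payload); after the value profiling site
// records it, the pair is stored into the destination virtual register.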
void JIT::emit_op_call_put_result(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

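// op_ret: emit the RetOptimizationCheck (the return-side hook for the optimizing JIT's
// counters), load the return value into regT1/regT0, then unwind this frame by reloading
// the caller's ReturnPC and CallerFrame from the call frame header before returning.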
void JIT::emit_op_ret(Instruction* currentInstruction)
{
    emitOptimizationCheck(RetOptimizationCheck);

    unsigned dst = currentInstruction[1].u.operand;

    emitLoad(dst, regT1, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}

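// op_ret_object_or_this implements constructor-return semantics: if the result register
// holds an object cell, return it; otherwise (not a cell, or a cell that is not an object)
// return the 'this' value instead. Both paths restore ReturnPC and CallerFrame from the
// frame header before the ret.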
void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    emitOptimizationCheck(RetOptimizationCheck);

    unsigned result = currentInstruction[1].u.operand;
    unsigned thisReg = currentInstruction[2].u.operand;

    emitLoad(result, regT1, regT0);
    Jump notJSCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();

    notJSCell.link(this);
    notObject.link(this);
    emitLoad(thisReg, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}

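// Slow-path entries for the call opcodes all funnel into compileOpCallSlowCase. Note that
// op_call_eval uses m_callLinkInfoIndex without incrementing it: eval calls take the stub
// path in compileCallEval and never get a call link info entry of their own (compileOpCall
// returns before appending to m_callStructureStubCompilationInfo).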
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}

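// Fast-path entries for the call opcodes; each simply forwards to compileOpCall with the
// current call link info index.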
void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

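// compileLoadVarargs builds the callee frame for op_call_varargs. When the varargs operand
// is this code block's own arguments register and the register is still empty (tag ==
// EmptyValueTag, i.e. no arguments object has been materialized), the arguments can be
// copied straight from the current frame: check the count against Arguments::MaxArguments,
// compute the new call frame from firstFreeRegister, the header size and the argument
// count, check that it fits in the RegisterFile, then store ArgumentCount, 'this', and each
// argument's tag/payload pair in a copy loop. Any failed check, or a materialized arguments
// object, falls back to the cti_op_load_varargs stub. Either way regT3 ends up holding the
// new call frame.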
void JIT::compileLoadVarargs(Instruction* instruction)
{
    int thisValue = instruction[2].u.operand;
    int arguments = instruction[3].u.operand;
    int firstFreeRegister = instruction[4].u.operand;

    JumpList slowCase;
    JumpList end;
    if (m_codeBlock->usesArguments() && arguments == m_codeBlock->argumentsRegister()) {
        emitLoadTag(arguments, regT1);
        slowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag)));

        load32(payloadFor(RegisterFile::ArgumentCount), regT2);
        slowCase.append(branch32(Above, regT2, TrustedImm32(Arguments::MaxArguments + 1)));
        // regT2: argumentCountIncludingThis

        move(regT2, regT3);
        add32(TrustedImm32(firstFreeRegister + RegisterFile::CallFrameHeaderSize), regT3);
        lshift32(TrustedImm32(3), regT3);
        addPtr(callFrameRegister, regT3);
        // regT3: newCallFrame

        slowCase.append(branchPtr(Below, AbsoluteAddress(m_globalData->interpreter->registerFile().addressOfEnd()), regT3));

        // Initialize ArgumentCount.
        store32(regT2, payloadFor(RegisterFile::ArgumentCount, regT3));

        // Initialize 'this'.
        emitLoad(thisValue, regT1, regT0);
        store32(regT0, Address(regT3, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));
        store32(regT1, Address(regT3, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));

        // Copy arguments.
        neg32(regT2);
        end.append(branchAdd32(Zero, TrustedImm32(1), regT2));
        // regT2: -argumentCount;

        Label copyLoop = label();
        load32(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))), regT0);
        load32(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))), regT1);
        store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));
        store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));
        branchAdd32(NonZero, TrustedImm32(1), regT2).linkTo(copyLoop, this);

        end.append(jump());
    }

    if (m_codeBlock->usesArguments() && arguments == m_codeBlock->argumentsRegister())
        slowCase.link(this);

    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(thisValue);
    stubCall.addArgument(arguments);
    stubCall.addArgument(Imm32(firstFreeRegister));
    stubCall.call(regT3);

    if (m_codeBlock->usesArguments() && arguments == m_codeBlock->argumentsRegister())
        end.link(this);
}

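// compileCallEval calls the cti_op_call_eval stub directly. The stub is expected to return
// the empty value (EmptyValueTag) when it does not handle the call itself (e.g. the callee
// is not the real eval), in which case we take the slow case and perform an ordinary
// virtual call; otherwise the result is already in regT1/regT0 and we only need to restore
// the caller's frame pointer.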
void JIT::compileCallEval()
{
    JITStubCall stubCall(this, cti_op_call_eval); // Initializes ScopeChain; ReturnPC; CodeBlock.
    stubCall.call();
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    sampleCodeBlock(m_codeBlock);
}

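// Slow case for op_call_eval: the stub declined to handle the call, so reload the callee
// from the frame header and dispatch through the generic virtual call thunk.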
void JIT::compileCallEvalSlowCase(Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    emitLoad(RegisterFile::Callee, regT1, regT0);
    emitNakedCall(m_globalData->jitStubs->ctiVirtualCall());

    sampleCodeBlock(m_codeBlock);
}

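// compileOpCall emits the fast path shared by op_call, op_call_eval, op_call_varargs and
// op_construct: build the callee frame (ArgumentCount, CallerFrame, Callee), switch
// callFrameRegister to it, then guard the linked-call fast path with a patchable pointer
// compare against the expected callee plus a cell-tag check. The patch locations are
// recorded in m_callStructureStubCompilationInfo so the call can be linked later; failures
// land in compileOpCallSlowCase.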
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[1].u.operand;

    /* Caller always:
        - Updates callFrameRegister to callee callFrame.
        - Initializes ArgumentCount; CallerFrame; Callee.

       For a JS call:
        - Caller initializes ScopeChain.
        - Callee initializes ReturnPC; CodeBlock.
        - Callee restores callFrameRegister before return.

       For a non-JS call:
        - Caller initializes ScopeChain; ReturnPC; CodeBlock.
        - Caller restores callFrameRegister after return.
    */

    if (opcodeID == op_call_varargs)
        compileLoadVarargs(instruction);
    else {
        int argCount = instruction[2].u.operand;
        int registerOffset = instruction[3].u.operand;

        addPtr(TrustedImm32(registerOffset * sizeof(Register)), callFrameRegister, regT3);

        store32(TrustedImm32(argCount), payloadFor(RegisterFile::ArgumentCount, regT3));
    } // regT3 holds newCallFrame with ArgumentCount initialized.

    storePtr(TrustedImmPtr(instruction), tagFor(RegisterFile::ArgumentCount, callFrameRegister));
    emitLoad(callee, regT1, regT0); // regT1, regT0 holds callee.

    storePtr(callFrameRegister, Address(regT3, RegisterFile::CallerFrame * static_cast<int>(sizeof(Register))));
    emitStore(RegisterFile::Callee, regT1, regT0, regT3);
    move(regT3, callFrameRegister);

    if (opcodeID == op_call_eval) {
        compileCallEval();
        return;
    }

    DataLabelPtr addressOfLinkedFunctionCheck;
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
    Jump slowCase = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));
    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(slowCase);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));

    ASSERT(m_callStructureStubCompilationInfo.size() == callLinkInfoIndex);
    m_callStructureStubCompilationInfo.append(StructureStubCompilationInfo());
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callType = CallLinkInfo::callTypeFor(opcodeID);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].bytecodeIndex = m_bytecodeOffset;

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    sampleCodeBlock(m_codeBlock);
}

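// Slow path for a call whose callee check failed or that has not been linked yet: call the
// virtual call/construct link thunk. callReturnLocation is recorded so the call site can
// later be repatched to jump directly to the linked callee.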
void JIT::compileOpCallSlowCase(OpcodeID opcodeID, Instruction*, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex)
{
    if (opcodeID == op_call_eval) {
        compileCallEvalSlowCase(iter);
        return;
    }

    linkSlowCase(iter);
    linkSlowCase(iter);

    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstructLink() : m_globalData->jitStubs->ctiVirtualCallLink());

    sampleCodeBlock(m_codeBlock);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)