]> git.saurik.com Git - apple/javascriptcore.git/blob - jit/JITCall32_64.cpp
0019d984e38499fc347db53d99304ae737b75335
[apple/javascriptcore.git] / jit / JITCall32_64.cpp
1 /*
2 * Copyright (C) 2008 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #include "config.h"
27
28 #if ENABLE(JIT)
29 #if USE(JSVALUE32_64)
30 #include "JIT.h"
31
32 #include "CodeBlock.h"
33 #include "Interpreter.h"
34 #include "JITInlineMethods.h"
35 #include "JITStubCall.h"
36 #include "JSArray.h"
37 #include "JSFunction.h"
38 #include "ResultType.h"
39 #include "SamplingTool.h"
40
41 #ifndef NDEBUG
42 #include <stdio.h>
43 #endif
44
45 using namespace std;
46
47 namespace JSC {
48
// Fills in the callee-related slots of the (already rolled) call frame header.
// Register contract on entry: regT0 = callee (JSFunction cell payload),
// regT1 = argument count. Clobbers regT3 with the callee's scope chain.
void JIT::compileOpCallInitializeCallFrame()
{
    // regT0 holds callee, regT1 holds argCount
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT3); // scopeChain
    emitPutIntToCallFrameHeader(regT1, RegisterFile::ArgumentCount);
    emitPutCellToCallFrameHeader(regT0, RegisterFile::Callee);
    emitPutCellToCallFrameHeader(regT3, RegisterFile::ScopeChain);
}
57
58 void JIT::emit_op_call_put_result(Instruction* instruction)
59 {
60 int dst = instruction[1].u.operand;
61 emitStore(dst, regT1, regT0);
62 }
63
// Fast path for op_call_varargs. Operands: [1] callee, [2] register holding
// the (runtime) argument count, [3] static register offset of the new frame.
// Emits a guarded virtual call: falls to the slow case when the callee is not
// a cell or not a JSFunction (vptr check).
void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int callee = instruction[1].u.operand;
    int argCountRegister = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    emitLoad(callee, regT1, regT0);
    emitLoadPayload(argCountRegister, regT2); // argCount
    // regT3 = argCount + registerOffset: the new frame's offset (in Registers)
    // from the current frame, since the varargs frame size depends on argCount.
    addPtr(Imm32(registerOffset), regT2, regT3); // registerOffset

    // Slow case: callee is not a cell, or its vtable pointer is not
    // JSFunction's (i.e. it is some other callable / non-function object).
    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    // regT3 becomes the new frame pointer (byte offset = slots * sizeof(Register)).
    mul32(TrustedImm32(sizeof(Register)), regT3, regT3);
    addPtr(callFrameRegister, regT3);
    // Link the new frame back to the current one via the CallerFrame slot.
    store32(TrustedImm32(JSValue::CellTag), tagFor(RegisterFile::CallerFrame, regT3));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame, regT3));
    move(regT3, callFrameRegister);

    // ctiVirtualCall expects the argument count in regT1 (callee already in regT0).
    move(regT2, regT1); // argCount

    emitNakedCall(m_globalData->jitStubs->ctiVirtualCall());

    sampleCodeBlock(m_codeBlock);
}
90
// Slow path for op_call_varargs: the callee was not a JSFunction. Links the
// two slow-case jumps emitted by compileOpCallVarargs (not-a-cell, vptr
// mismatch) and calls out to the generic stub. Register contract from the
// fast path: regT1/regT0 = callee tag/payload, regT3 = register offset,
// regT2 = argument count.
void JIT::compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int callee = instruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(regT3);
    stubCall.addArgument(regT2);
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}
106
// op_ret: loads the return value into regT1/regT0 (the call result registers),
// restores the caller's frame pointer and return address from the call frame
// header, and returns to the caller.
void JIT::emit_op_ret(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitLoad(dst, regT1, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}
118
// op_ret_object_or_this (used for constructor returns): if the result operand
// is an object cell, return it; otherwise return the 'this' operand instead.
// Both paths emit the same epilogue (restore caller frame + return address).
void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    unsigned thisReg = currentInstruction[2].u.operand;

    emitLoad(result, regT1, regT0);
    // Not a cell, or a cell whose structure says it is not ObjectType:
    // fall through to the 'this' path below.
    Jump notJSCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump notObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));

    // Result is an object: return it (already in regT1/regT0).
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();

    // Result is not an object: return 'this' instead.
    notJSCell.link(this);
    notObject.link(this);
    emitLoad(thisReg, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}
145
146 void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
147 {
148 compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
149 }
150
151 void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
152 {
153 compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
154 }
155
156 void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
157 {
158 compileOpCallVarargsSlowCase(currentInstruction, iter);
159 }
160
161 void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
162 {
163 compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
164 }
165
166 void JIT::emit_op_call(Instruction* currentInstruction)
167 {
168 compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
169 }
170
171 void JIT::emit_op_call_eval(Instruction* currentInstruction)
172 {
173 compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
174 }
175
176 void JIT::emit_op_call_varargs(Instruction* currentInstruction)
177 {
178 compileOpCallVarargs(currentInstruction);
179 }
180
181 void JIT::emit_op_construct(Instruction* currentInstruction)
182 {
183 compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
184 }
185
186 #if !ENABLE(JIT_OPTIMIZE_CALL)
187
188 /* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
189
// Non-call-linking variant of the call fast path (used when
// JIT_OPTIMIZE_CALL is disabled). Always dispatches through the virtual
// call/construct trampolines; non-JSFunction callees fall to the slow case.
// Operands: [1] callee, [2] argument count, [3] register offset of the new frame.
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    Jump wasEval;
    if (opcodeID == op_call_eval) {
        // Try the eval stub first; if it produced a value (tag != empty) the
        // call is done and we skip the normal call sequence below.
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
    }

    emitLoad(callee, regT1, regT0);

    // Slow case: callee is not a cell, or not a JSFunction (vptr check).
    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    // Link the new frame back to the current one via its CallerFrame slot,
    // then advance the frame pointer and pass argCount in regT1.
    store32(TrustedImm32(JSValue::CellTag), tagFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(TrustedImm32(argCount), regT1);

    emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstruct() : m_globalData->jitStubs->ctiVirtualCall());

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}
224
// Non-call-linking slow path: the callee was not a JSFunction. Links the two
// slow-case jumps from the fast path (not-a-cell, vptr mismatch) and calls
// the generic NotJSFunction/NotJSConstruct stub with the original operands.
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    linkSlowCase(iter);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(callee);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}
242
243 #else // !ENABLE(JIT_OPTIMIZE_CALL)
244
245 /* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
246
// Call-linking variant of the call fast path (JIT_OPTIMIZE_CALL enabled).
// Emits a patchable callee check: once the call is linked, the immediate in
// the branch is patched to the linked JSFunction, making repeat calls to the
// same callee take this fast path directly into the callee's code.
// Operands: [1] callee, [2] argument count, [3] register offset of the new frame.
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    Jump wasEval;
    if (opcodeID == op_call_eval) {
        // Try the eval stub first; if it produced a value (tag != empty) the
        // call is done and we skip the normal call sequence below.
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
    }

    emitLoad(callee, regT1, regT0);

    DataLabelPtr addressOfLinkedFunctionCheck;

    // The compare-and-branch below is patched at link time, so its layout must
    // be fixed: keep it in an uninterrupted sequence (no constant-pool flushes
    // or other interleaved emission).
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    // Initially compares against 0 (never matches), so the first execution
    // always takes the slow path, which performs the actual linking.
    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));

    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(jumpToSlow);
    // The patching code relies on a fixed distance between the comparison's
    // immediate and the branch; verify it matches the expected constant.
    ASSERT_JIT_OFFSET(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow), patchOffsetOpCallCompareToJump);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callStructureStubCompilationInfo[callLinkInfoIndex].isCall = opcodeID != op_construct;

    // Second slow case: payload matched a linked function but the tag says
    // the value is not a cell.
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));

    // The following is the fast case, only used whan a callee can be linked.

    // Fast version of stack frame initialization, directly relative to edi.
    // Note that this omits to set up RegisterFile::CodeBlock, which is set in the callee
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT2);

    store32(TrustedImm32(JSValue::Int32Tag), tagFor(registerOffset + RegisterFile::ArgumentCount));
    store32(Imm32(argCount), payloadFor(registerOffset + RegisterFile::ArgumentCount));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    emitStore(registerOffset + RegisterFile::Callee, regT1, regT0);
    store32(TrustedImm32(JSValue::CellTag), tagFor(registerOffset + RegisterFile::ScopeChain));
    store32(regT2, payloadFor(registerOffset + RegisterFile::ScopeChain));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee; the call target is patched in when the call is linked.
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}
302
// Call-linking slow path. Reached either because the patchable callee check
// failed (not yet linked, or a different callee) or because the callee value
// was not a cell. JSFunction callees go through the call-link trampoline,
// which performs the linking; anything else falls through to the generic stub.
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    // Two slow-case entries, in the order the fast path added them:
    // the patchable compare-and-branch, then the cell-tag check.
    linkSlowCase(iter);
    linkSlowCase(iter);

    // Fast check for JS function.
    Jump callLinkFailNotObject = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    store32(TrustedImm32(JSValue::CellTag), tagFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    // The link trampoline links the call site (via the recorded
    // callReturnLocation) and then performs the call.
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstructLink() : m_globalData->jitStubs->ctiVirtualCallLink());

    // Done! - return back to the hot path.
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions
    callLinkFailNotObject.link(this);
    callLinkFailNotJSFunction.link(this);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(callee);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}
341
342 /* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
343
344 #endif // !ENABLE(JIT_OPTIMIZE_CALL)
345
346 } // namespace JSC
347
348 #endif // USE(JSVALUE32_64)
349 #endif // ENABLE(JIT)