/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSFunction.h"
#include "ResultType.h"
#include "SamplingTool.h"

namespace JSC {
void JIT::compileOpCallInitializeCallFrame()
{
    // regT0 holds callee, regT1 holds argCount
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT3); // scopeChain
    emitPutIntToCallFrameHeader(regT1, RegisterFile::ArgumentCount);
    emitPutCellToCallFrameHeader(regT0, RegisterFile::Callee);
    emitPutCellToCallFrameHeader(regT3, RegisterFile::ScopeChain);
}
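// Stores the result of the preceding call (tag in regT1, payload in regT0) into the
// instruction's dst operand.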
void JIT::emit_op_call_put_result(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitStore(dst, regT1, regT0);
}
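// Fast path for op_call_varargs. The argument count is only known at runtime, so the new
// call frame is positioned from the dynamic argCount plus the static registerOffset rather
// than from a compile-time constant.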
void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int callee = instruction[1].u.operand;
    int argCountRegister = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    emitLoad(callee, regT1, regT0);
    emitLoadPayload(argCountRegister, regT2); // argCount
    addPtr(Imm32(registerOffset), regT2, regT3); // registerOffset

    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    mul32(TrustedImm32(sizeof(Register)), regT3, regT3);
    addPtr(callFrameRegister, regT3);
    store32(TrustedImm32(JSValue::CellTag), tagFor(RegisterFile::CallerFrame, regT3));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame, regT3));
    move(regT3, callFrameRegister);

    move(regT2, regT1); // argCount

    emitNakedCall(m_globalData->jitStubs->ctiVirtualCall());

    sampleCodeBlock(m_codeBlock);
}
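// Slow path for op_call_varargs: the callee was not a JSFunction cell, so call out to the
// cti_op_call_NotJSFunction stub, passing the callee value along with the register offset
// and argument count already computed on the fast path.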
void JIT::compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int callee = instruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(regT3);
    stubCall.addArgument(regT2);
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}
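// op_ret: load the return value, restore the caller's frame pointer and return address from
// the call frame header, and return to the caller.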
void JIT::emit_op_ret(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitLoad(dst, regT1, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}
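// op_ret_object_or_this: used when returning from a constructor. If the explicit return
// value is an object cell it is returned as-is; otherwise the 'this' register is returned
// in its place.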
void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    unsigned thisReg = currentInstruction[2].u.operand;

    emitLoad(result, regT1, regT0);
    Jump notJSCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump notObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));

    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();

    notJSCell.link(this);
    notObject.link(this);
    emitLoad(thisReg, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}
#if !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
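// Without call linking, op_call / op_construct always dispatch through the generic virtual
// call thunks. op_call_eval first calls the cti_op_call_eval stub; if the stub returns the
// empty value the callee was not actually eval, and a normal call is emitted instead.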
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
    }

    emitLoad(callee, regT1, regT0);

    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    store32(TrustedImm32(JSValue::CellTag), tagFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(TrustedImm32(argCount), regT1);

    emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstruct() : m_globalData->jitStubs->ctiVirtualCall());

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}
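// Slow path when the callee is not a JSFunction: call out to the NotJSFunction /
// NotJSConstruct stub with the callee, register offset, and argument count.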
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    linkSlowCase(iter);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(callee);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}
#else // !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
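// With call linking enabled, the fast path compares the callee against a patchable pointer
// (initially 0). Once the slow-path stub has linked this call site, repeat calls to the same
// JSFunction pass the check and jump directly to the callee's compiled code.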
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
    }

    emitLoad(callee, regT1, regT0);

    DataLabelPtr addressOfLinkedFunctionCheck;

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));

    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(jumpToSlow);
    ASSERT_JIT_OFFSET(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow), patchOffsetOpCallCompareToJump);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callStructureStubCompilationInfo[callLinkInfoIndex].isCall = opcodeID != op_construct;

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));

    // The following is the fast case, only used when a callee can be linked.

    // Fast version of stack frame initialization, directly relative to edi.
    // Note that this omits to set up RegisterFile::CodeBlock, which is set in the callee.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT2);

    store32(TrustedImm32(JSValue::Int32Tag), tagFor(registerOffset + RegisterFile::ArgumentCount));
    store32(Imm32(argCount), payloadFor(registerOffset + RegisterFile::ArgumentCount));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    emitStore(registerOffset + RegisterFile::Callee, regT1, regT0);
    store32(TrustedImm32(JSValue::CellTag), tagFor(registerOffset + RegisterFile::ScopeChain));
    store32(regT2, payloadFor(registerOffset + RegisterFile::ScopeChain));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}
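// Slow path for a linked call site: if the callee is some other JSFunction, dispatch through
// the virtual call/construct link thunk (which may relink this call site); if the callee is
// not a JSFunction at all, fall through to the NotJSFunction / NotJSConstruct stub.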
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    // Fast check for JS function.
    Jump callLinkFailNotObject = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    store32(TrustedImm32(JSValue::CellTag), tagFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstructLink() : m_globalData->jitStubs->ctiVirtualCallLink());

    // Done! - return back to the hot path.
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions
    callLinkFailNotObject.link(this);
    callLinkFailNotJSFunction.link(this);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(callee);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}
/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

#endif // !ENABLE(JIT_OPTIMIZE_CALL)

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)