/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE64)
#include "JIT.h"

#include "CodeBlock.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "ResultType.h"
#include "SamplingTool.h"

namespace JSC {
void JIT::compileOpCallInitializeCallFrame()
{
    // regT0 holds callee, regT1 holds argCount
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT3); // scopeChain
    emitPutIntToCallFrameHeader(regT1, RegisterFile::ArgumentCount);
    emitPutCellToCallFrameHeader(regT0, RegisterFile::Callee);
    emitPutCellToCallFrameHeader(regT3, RegisterFile::ScopeChain);
}
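// The call stubs leave the call's result in regT0; op_call_put_result simply stores it into
// its destination operand (emitPutVirtualRegister writes from regT0 by default).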
void JIT::emit_op_call_put_result(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitPutVirtualRegister(dst);
}
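// Fast path for op_call_varargs. Unlike a normal call the argument count is only known at
// runtime, so the new callframe's offset (regT2) is computed from the runtime argument count
// plus the static registerOffset.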
void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int callee = instruction[1].u.operand;
    int argCountRegister = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    emitGetVirtualRegister(argCountRegister, regT1);
    emitFastArithImmToInt(regT1);
    emitGetVirtualRegister(callee, regT0);
    addPtr(Imm32(registerOffset), regT1, regT2);

    // Check for JSFunctions.
    emitJumpSlowCaseIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    mul32(TrustedImm32(sizeof(Register)), regT2, regT2);
    intptr_t offset = (intptr_t)sizeof(Register) * (intptr_t)RegisterFile::CallerFrame;
    addPtr(Imm32((int32_t)offset), regT2, regT3);
    addPtr(callFrameRegister, regT3);
    storePtr(callFrameRegister, regT3);
    addPtr(regT2, callFrameRegister);
    emitNakedCall(m_globalData->jitStubs->ctiVirtualCall());

    sampleCodeBlock(m_codeBlock);
}
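// Slow path for op_call_varargs: taken when the callee is not a JSCell or not a JSFunction;
// falls back to the cti_op_call_NotJSFunction stub.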
void JIT::compileOpCallVarargsSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
    // Link the two slow-case jumps planted by compileOpCallVarargs (not a cell, not a JSFunction).
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT2);
    stubCall.addArgument(regT1);
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}
#if !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
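// Without call optimization every call site goes through the generic virtual call stubs
// (ctiVirtualCall / ctiVirtualConstruct); no per-call-site linking of the callee is attempted.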
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee, regT0);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue())));
    }

    emitGetVirtualRegister(callee, regT0);

    // Check for JSFunctions.
    emitJumpSlowCaseIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstruct() : m_globalData->jitStubs->ctiVirtualCall());

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    // Link the two slow-case jumps planted by compileOpCall (not a cell, not a JSFunction).
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(regT0);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}
#else // !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
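// With call optimization each call site embeds a patchable pointer compare against the last
// callee seen there (addressOfLinkedFunctionCheck). When it matches, execution falls through to
// a directly linked call; otherwise the slow case repatches the site via the
// ctiVirtualCallLink / ctiVirtualConstructLink stubs.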
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee, regT0);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue())));
    }

    // This plants a check for a cached JSFunction value, so we can plant a fast link to the callee.
    // This deliberately leaves the callee in ecx, used when setting up the stack frame below.
    emitGetVirtualRegister(callee, regT0);
    DataLabelPtr addressOfLinkedFunctionCheck;

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(JSValue::encode(JSValue())));

    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(jumpToSlow);
    ASSERT_JIT_OFFSET(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow), patchOffsetOpCallCompareToJump);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callStructureStubCompilationInfo[callLinkInfoIndex].isCall = opcodeID != op_construct;

    // The following is the fast case, only used when a callee can be linked.

    // Fast version of stack frame initialization, directly relative to edi.
    // Note that this omits to set up RegisterFile::CodeBlock, which is set in the callee.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1); // newScopeChain

    store32(TrustedImm32(Int32Tag), intTagFor(registerOffset + RegisterFile::ArgumentCount));
    store32(Imm32(argCount), intPayloadFor(registerOffset + RegisterFile::ArgumentCount));
    storePtr(callFrameRegister, Address(callFrameRegister, (registerOffset + RegisterFile::CallerFrame) * static_cast<int>(sizeof(Register))));
    storePtr(regT0, Address(callFrameRegister, (registerOffset + RegisterFile::Callee) * static_cast<int>(sizeof(Register))));
    storePtr(regT1, Address(callFrameRegister, (registerOffset + RegisterFile::ScopeChain) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee.
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}
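// Slow path for a linked call site: the callee did not match the cached function (or nothing
// has been linked yet), so either link the site via the virtual-call-link stubs or, for
// non-JSFunction callees, fall back to the NotJSFunction/NotJSConstruct stubs.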
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    // Link the slow-case jump planted by the patchable callee check in compileOpCall.
    linkSlowCase(iter);

    // Fast check for JS function.
    Jump callLinkFailNotObject = emitJumpIfNotJSCell(regT0);
    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsFunctionVPtr));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstructLink() : m_globalData->jitStubs->ctiVirtualCallLink());

    // Done! - return back to the hot path.
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions.
    callLinkFailNotObject.link(this);
    callLinkFailNotJSFunction.link(this);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(regT0);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}
/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

#endif // !ENABLE(JIT_OPTIMIZE_CALL)

} // namespace JSC

#endif // USE(JSVALUE64)
#endif // ENABLE(JIT)