/*
 * Copyright (C) 2008, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "Arguments.h"
#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "Operations.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include <wtf/StringPrintStream.h>

namespace JSC {
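
// In the JSVALUE32_64 representation each JSValue occupies two 32-bit words, a
// tag and a payload, so values travel through this code in register pairs
// (conventionally tag in regT1, payload in regT0).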

void JIT::emit_op_call_put_result(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitLoad(dst, regT1, regT0);
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}
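
// op_ret_object_or_this implements the constructor return rule: if the value
// being returned is an object it is returned as-is; otherwise 'this' is
// returned in its place.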
void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    unsigned thisReg = currentInstruction[2].u.operand;

    emitLoad(result, regT1, regT0);
    Jump notJSCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();

    notJSCell.link(this);
    notObject.link(this);
    emitLoad(thisReg, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}
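
// op_call_eval call sites are never linked (compileOpCall returns early for
// them), so the eval variants here and below pass m_callLinkInfoIndex through
// without incrementing it.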
void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::compileLoadVarargs(Instruction* instruction)
{
    int thisValue = instruction[2].u.operand;
    int arguments = instruction[3].u.operand;
    int firstFreeRegister = instruction[4].u.operand;

    JumpList slowCase;
    JumpList end;
    bool canOptimize = m_codeBlock->usesArguments()
        && arguments == m_codeBlock->argumentsRegister()
        && !m_codeBlock->symbolTable()->slowArguments();
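
    // Fast path: if the varargs source is the caller's own 'arguments' register
    // and the Arguments object was never materialized (the register still holds
    // the empty value, checked below), the arguments can be copied from the old
    // frame to the new one directly, without allocating anything.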
    if (canOptimize) {
        emitLoadTag(arguments, regT1);
        slowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag)));

        load32(payloadFor(JSStack::ArgumentCount), regT2);
        slowCase.append(branch32(Above, regT2, TrustedImm32(Arguments::MaxArguments + 1)));
        // regT2: argumentCountIncludingThis

        move(regT2, regT3);
        add32(TrustedImm32(firstFreeRegister + JSStack::CallFrameHeaderSize), regT3);
        lshift32(TrustedImm32(3), regT3);
        addPtr(callFrameRegister, regT3);
        // regT3: newCallFrame
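        // Frame size above is (argumentCountIncludingThis + firstFreeRegister +
        // CallFrameHeaderSize) << 3: each Register slot is 8 bytes, a 32-bit tag
        // plus a 32-bit payload per JSValue.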

        slowCase.append(branchPtr(Below, AbsoluteAddress(m_vm->interpreter->stack().addressOfEnd()), regT3));

        // Initialize ArgumentCount.
        store32(regT2, payloadFor(JSStack::ArgumentCount, regT3));

        // Initialize 'this'.
        emitLoad(thisValue, regT1, regT0);
        store32(regT0, Address(regT3, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));
        store32(regT1, Address(regT3, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));
        // Copy arguments.
        neg32(regT2);
        end.append(branchAdd32(Zero, TrustedImm32(1), regT2));
        // regT2: -argumentCount

        Label copyLoop = label();
        load32(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))), regT0);
        load32(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))), regT1);
        store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));
        store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));
        branchAdd32(NonZero, TrustedImm32(1), regT2).linkTo(copyLoop, this);

        end.append(jump());
    }

    if (canOptimize)
        slowCase.link(this);
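
    // Slow path: the cti_op_load_varargs stub builds the new call frame in C++,
    // handling the cases the inline fast path rejected.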
    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(thisValue);
    stubCall.addArgument(arguments);
    stubCall.addArgument(Imm32(firstFreeRegister));
    stubCall.call(regT3);

    if (canOptimize)
        end.link(this);
}

void JIT::compileCallEval()
{
    JITStubCall stubCall(this, cti_op_call_eval); // Initializes ScopeChain; ReturnPC; CodeBlock.
    stubCall.call();
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
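    // An empty result tag means the callee was not the true eval function; the
    // slow case re-dispatches this call as an ordinary virtual call.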
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileCallEvalSlowCase(Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    emitLoad(JSStack::Callee, regT1, regT0);
    emitNakedCall(m_vm->getCTIStub(virtualCallGenerator).code());

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[1].u.operand;

    /* Caller always:
        - Updates callFrameRegister to callee callFrame.
        - Initializes ArgumentCount; CallerFrame; Callee.
       For a JS call:
        - Caller initializes ScopeChain.
        - Callee initializes ReturnPC; CodeBlock.
        - Callee restores callFrameRegister before return.
       For a non-JS call:
        - Caller initializes ScopeChain; ReturnPC; CodeBlock.
        - Caller restores callFrameRegister after return.
    */

    if (opcodeID == op_call_varargs)
        compileLoadVarargs(instruction);
    else {
        int argCount = instruction[2].u.operand;
        int registerOffset = instruction[3].u.operand;

        if (opcodeID == op_call && shouldEmitProfiling()) {
            emitLoad(registerOffset + CallFrame::argumentOffsetIncludingThis(0), regT0, regT1);
            Jump done = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
            loadPtr(Address(regT1, JSCell::structureOffset()), regT1);
            storePtr(regT1, instruction[5].u.arrayProfile->addressOfLastSeenStructure());
            done.link(this);
        }

        addPtr(TrustedImm32(registerOffset * sizeof(Register)), callFrameRegister, regT3);

        store32(TrustedImm32(argCount), payloadFor(JSStack::ArgumentCount, regT3));
    } // regT3 holds newCallFrame with ArgumentCount initialized.
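
    // Stashing the Instruction* in the tag word of the caller's ArgumentCount
    // slot lets the runtime recover the bytecode origin of this call site.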
    storePtr(TrustedImmPtr(instruction), tagFor(JSStack::ArgumentCount, callFrameRegister));
    emitLoad(callee, regT1, regT0); // regT1, regT0 holds callee.

    storePtr(callFrameRegister, Address(regT3, JSStack::CallerFrame * static_cast<int>(sizeof(Register))));
    emitStore(JSStack::Callee, regT1, regT0, regT3);
    move(regT3, callFrameRegister);

    if (opcodeID == op_call_eval) {
        compileCallEval();
        return;
    }
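
    // The patchable compare below is the call's inline cache: when the call is
    // linked, the expected JSFunction* is planted at addressOfLinkedFunctionCheck,
    // so repeat calls to the same callee fall straight through to the near call.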
    DataLabelPtr addressOfLinkedFunctionCheck;
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
    Jump slowCase = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));
    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(slowCase);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));

    ASSERT(m_callStructureStubCompilationInfo.size() == callLinkInfoIndex);
    m_callStructureStubCompilationInfo.append(StructureStubCompilationInfo());
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callType = CallLinkInfo::callTypeFor(opcodeID);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].bytecodeIndex = m_bytecodeOffset;

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT1);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(OpcodeID opcodeID, Instruction*, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex)
{
    if (opcodeID == op_call_eval) {
        compileCallEvalSlowCase(iter);
        return;
    }
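
    // Two slow-case links, matching the two addSlowCase calls in compileOpCall:
    // the patched function-pointer miss and the non-cell callee check.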
    linkSlowCase(iter);
    linkSlowCase(iter);

    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_vm->getCTIStub(linkConstructGenerator).code() : m_vm->getCTIStub(linkCallGenerator).code());

    sampleCodeBlock(m_codeBlock);
}

void JIT::privateCompileClosureCall(CallLinkInfo* callLinkInfo, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
{
    JumpList slowCases;

    slowCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    slowCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(expectedStructure)));
    slowCases.append(branchPtr(NotEqual, Address(regT0, JSFunction::offsetOfExecutable()), TrustedImmPtr(expectedExecutable)));

    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT1);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);
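
    // Guarding on Structure and ExecutableBase rather than function identity lets
    // every closure instantiated from the same function body share this stub.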
    Call call = nearCall();
    Jump done = jump();

    slowCases.link(this);
    move(TrustedImmPtr(callLinkInfo->callReturnLocation.executableAddress()), regT2);
    restoreReturnAddressBeforeReturn(regT2);
    Jump slow = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(virtualCallGenerator).code()));

    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline closure call stub for %s, return point %p, target %p (%s)",
                toCString(*m_codeBlock).data(),
                callLinkInfo->hotPathOther.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(),
                toCString(pointerDump(calleeCodeBlock)).data())),
        *m_vm, m_codeBlock->ownerExecutable(), expectedStructure, expectedExecutable,
        callLinkInfo->codeOrigin));
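
    // Patch the original inline check to jump into the new stub, and point the
    // call's slow-path return at the generic virtual call thunk.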
    RepatchBuffer repatchBuffer(m_codeBlock);

    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo->hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(virtualCallGenerator).code());

    callLinkInfo->stub = stubRoutine.release();
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)