/*
 * Copyright (C) 2008, 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE64)
#include "JIT.h"

#include "CodeBlock.h"
#include "JITInlines.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include "SetupVarargsFrame.h"
#include "StackAlignment.h"
#include "ThunkGenerators.h"
#include <wtf/StringPrintStream.h>

namespace JSC {
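
// emitPutCallResult: stores the call's result (left in regT0 by the callee or the
// slow-path operation; emitPutVirtualRegister defaults to regT0) into the destination
// virtual register, recording it at the value-profiling site first.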
void JIT::emitPutCallResult(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitValueProfilingSite();
    emitPutVirtualRegister(dst);
}
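
// Builds the callee frame for op_call_varargs / op_construct_varargs. The argument
// count is only known at runtime, so the frame is sized and populated via calls into
// the runtime before the callee is invoked.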
void JIT::compileSetupVarargsFrame(Instruction* instruction, CallLinkInfo* info)
{
    int thisValue = instruction[3].u.operand;
    int arguments = instruction[4].u.operand;
    int firstFreeRegister = instruction[5].u.operand;
    int firstVarArgOffset = instruction[6].u.operand;
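
    // Size the new frame via the runtime, compute its base into regT1, move the stack
    // pointer below it (leaving stack-aligned room for the outgoing operation call),
    // then have the runtime copy the varargs into place.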
    emitGetVirtualRegister(arguments, regT1);
    callOperation(operationSizeFrameForVarargs, regT1, -firstFreeRegister, firstVarArgOffset);
    move(TrustedImm32(-firstFreeRegister), regT1);
    emitSetVarargsFrame(*this, returnValueGPR, false, regT1, regT1);
    addPtr(TrustedImm32(-(sizeof(CallerFrameAndPC) + WTF::roundUpToMultipleOf(stackAlignmentBytes(), 5 * sizeof(void*)))), regT1, stackPointerRegister);
    emitGetVirtualRegister(arguments, regT2);
    callOperation(operationSetupVarargsFrame, regT1, regT2, firstVarArgOffset, regT0);
    move(returnValueGPR, regT1);

    // Profile the argument count.
    load32(Address(regT1, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset), regT2);
    load8(info->addressOfMaxNumArguments(), regT0);
    Jump notBiggest = branch32(Above, regT0, regT2);
    Jump notSaturated = branch32(BelowOrEqual, regT2, TrustedImm32(255));
    move(TrustedImm32(255), regT2);
    notSaturated.link(this);
    store8(regT2, info->addressOfMaxNumArguments());
    notBiggest.link(this);
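
    // Initialize 'this'.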
    emitGetVirtualRegister(thisValue, regT0);
    store64(regT0, Address(regT1, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));

    addPtr(TrustedImm32(sizeof(CallerFrameAndPC)), regT1, stackPointerRegister);
}
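
// Hot path for op_call_eval: link the prospective callee frame to the caller and call
// operationCallEval directly. An empty JSValue result means the runtime did not
// perform an eval (presumably because the callee is not the real eval), in which case
// we fall to the slow case and treat it as an ordinary call.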
void JIT::compileCallEval(Instruction* instruction)
{
    addPtr(TrustedImm32(-static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC))), stackPointerRegister, regT1);
    storePtr(callFrameRegister, Address(regT1, CallFrame::callerFrameOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    callOperation(operationCallEval, regT1);

    addSlowCase(branch64(Equal, regT0, TrustedImm64(JSValue::encode(JSValue()))));

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}
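
// Slow path for op_call_eval: reload the callee from the already materialized callee
// frame and dispatch through the virtual call thunk, as for any other unlinked call.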
void JIT::compileCallEvalSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    int registerOffset = -instruction[4].u.operand;

    addPtr(TrustedImm32(registerOffset * sizeof(Register) + sizeof(CallerFrameAndPC)), callFrameRegister, stackPointerRegister);

    load64(Address(stackPointerRegister, sizeof(Register) * JSStack::Callee - sizeof(CallerFrameAndPC)), regT0);
    move(TrustedImmPtr(&CallLinkInfo::dummy()), regT2);
    emitNakedCall(m_vm->getCTIStub(virtualCallThunkGenerator).code());
    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}
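
// Emits the hot path shared by all call-shaped opcodes (op_call, op_construct,
// op_call_eval, and the varargs variants). The actual call goes through a patchable
// check plus a naked call that the call link machinery can later bind to the callee.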
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    CallLinkInfo* info = m_codeBlock->addCallLinkInfo();
    int callee = instruction[2].u.operand;

    /* Caller always:
        - Updates callFrameRegister to callee callFrame.
        - Initializes ArgumentCount; CallerFrame; Callee.
       For a JS call:
        - Callee initializes ReturnPC; CodeBlock.
        - Callee restores callFrameRegister before return.
       For a non-JS call:
        - Caller initializes ReturnPC; CodeBlock.
        - Caller restores callFrameRegister after return.
    */
    COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct), call_and_construct_opcodes_must_be_same_length);
    COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_varargs), call_and_call_varargs_opcodes_must_be_same_length);
    COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct_varargs), call_and_construct_varargs_opcodes_must_be_same_length);
    if (opcodeID == op_call_varargs || opcodeID == op_construct_varargs)
        compileSetupVarargsFrame(instruction, info);
    else {
        int argCount = instruction[3].u.operand;
        int registerOffset = -instruction[4].u.operand;

        if (opcodeID == op_call && shouldEmitProfiling()) {
            emitGetVirtualRegister(registerOffset + CallFrame::argumentOffsetIncludingThis(0), regT0);
            Jump done = emitJumpIfNotJSCell(regT0);
            load32(Address(regT0, JSCell::structureIDOffset()), regT0);
            store32(regT0, instruction[OPCODE_LENGTH(op_call) - 2].u.arrayProfile->addressOfLastSeenStructureID());
            done.link(this);
        }

        addPtr(TrustedImm32(registerOffset * sizeof(Register) + sizeof(CallerFrameAndPC)), callFrameRegister, stackPointerRegister);
        store32(TrustedImm32(argCount), Address(stackPointerRegister, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset - sizeof(CallerFrameAndPC)));
    } // SP holds newCallFrame + sizeof(CallerFrameAndPC), with ArgumentCount initialized.
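
    // Record where this call originates: the caller's bytecode offset is encoded into
    // the tag half of the caller frame's ArgumentCount slot so the call site can be
    // recovered when the stack is walked.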
    uint32_t bytecodeOffset = instruction - m_codeBlock->instructions().begin();
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeOffset(bytecodeOffset);
    store32(TrustedImm32(locationBits), Address(callFrameRegister, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + TagOffset));
    emitGetVirtualRegister(callee, regT0); // regT0 holds callee.

    store64(regT0, Address(stackPointerRegister, JSStack::Callee * static_cast<int>(sizeof(Register)) - sizeof(CallerFrameAndPC)));

    if (opcodeID == op_call_eval) {
        compileCallEval(instruction);
        return;
    }
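
    // Inline call cache: the patchable compare below is emitted against null, so the
    // first execution always takes the slow case. Once the call is linked, the
    // expected callee (hotPathBegin) and the call target (hotPathOther) recorded here
    // can be repatched so repeat calls to the same function stay on this fast path.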
    DataLabelPtr addressOfLinkedFunctionCheck;
    Jump slowCase = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));
    addSlowCase(slowCase);

    ASSERT(m_callCompilationInfo.size() == callLinkInfoIndex);
    info->setUpCall(CallLinkInfo::callTypeFor(opcodeID), CodeOrigin(m_bytecodeOffset), regT0);
    m_callCompilationInfo.append(CallCompilationInfo());
    m_callCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callCompilationInfo[callLinkInfoIndex].callLinkInfo = info;

    m_callCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}
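
// Slow path shared by the non-eval call opcodes: hand the unlinked (or mispredicted)
// call off to the appropriate link thunk, which performs the call and links the hot
// path when possible.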
void JIT::compileOpCallSlowCase(OpcodeID opcodeID, Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex)
{
    if (opcodeID == op_call_eval) {
        compileCallEvalSlowCase(instruction, iter);
        return;
    }

    linkSlowCase(iter);

    ThunkGenerator generator = linkThunkGeneratorFor(
        (opcodeID == op_construct || opcodeID == op_construct_varargs) ? CodeForConstruct : CodeForCall,
        RegisterPreservationNotRequired);

    move(TrustedImmPtr(m_callCompilationInfo[callLinkInfoIndex].callLinkInfo), regT2);
    m_callCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_vm->getCTIStub(generator).code());

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}
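
// Per-opcode entry points: thin wrappers around compileOpCall / compileOpCallSlowCase.
// Note that op_call_eval passes m_callLinkInfoIndex without incrementing it, since the
// eval path never appends a CallCompilationInfo entry.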
void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_construct_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}

} // namespace JSC

#endif // USE(JSVALUE64)
#endif // ENABLE(JIT)