2 * Copyright (C) 2008 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "CodeBlock.h"
32 #include "JITInlineMethods.h"
33 #include "JITStubCall.h"
35 #include "JSFunction.h"
36 #include "Interpreter.h"
37 #include "ResultType.h"
38 #include "SamplingTool.h"
// Fills in the callee's call frame prior to entering it.
// On entry: regT0 holds the callee (shown to be a JSFunction* by the field
// offsets used below), regT1 holds the argument count. Clobbers regT1.
void JIT::compileOpCallInitializeCallFrame()
{
    // regT0 holds callee, regT1 holds argCount
    store32(regT1, Address(callFrameRegister, RegisterFile::ArgumentCount * static_cast<int>(sizeof(Register))));

    // Fetch the callee's scope chain node out of the JSFunction object.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // scopeChain

    // Clear the lazily-created 'arguments' slot, then record callee and scope chain.
    emitStore(static_cast<unsigned>(RegisterFile::OptionalCalleeArguments), JSValue());
    storePtr(regT0, Address(callFrameRegister, RegisterFile::Callee * static_cast<int>(sizeof(Register)))); // callee
    storePtr(regT1, Address(callFrameRegister, RegisterFile::ScopeChain * static_cast<int>(sizeof(Register)))); // scopeChain
}
// Marshals the op_call state into JIT stub argument slots: the callee value
// currently in regT0/regT1 (slots 1-2), plus the registerOffset and argCount
// operands of the bytecode (slots 3 and 5).
void JIT::compileOpCallSetupArgs(Instruction* instruction)
{
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    emitPutJITStubArg(regT0, 1);
    emitPutJITStubArg(regT1, 2);
    emitPutJITStubArgConstant(registerOffset, 3);
    emitPutJITStubArgConstant(argCount, 5);
}
// Like compileOpCallSetupArgs, but for op_construct: additionally passes the
// prototype (bytecode operand 5, spilled via regT2/regT3) and the 'this'
// register index (operand 6) to the stub.
void JIT::compileOpConstructSetupArgs(Instruction* instruction)
{
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;
    int proto = instruction[5].u.operand;
    int thisRegister = instruction[6].u.operand;

    emitPutJITStubArg(regT0, 1);
    emitPutJITStubArg(regT1, 2);
    emitPutJITStubArgConstant(registerOffset, 3);
    emitPutJITStubArgConstant(argCount, 5);
    emitPutJITStubArgFromVirtualRegister(proto, 7, regT2, regT3);
    emitPutJITStubArgConstant(thisRegister, 9);
}
// Stub-argument setup for op_call_varargs. Unlike the plain-call variant,
// registerOffset and argCount are only known at runtime here, so they are
// passed from registers (regT3/regT2) rather than as constants.
void JIT::compileOpCallVarargsSetupArgs(Instruction*)
{
    emitPutJITStubArg(regT0, 1);
    emitPutJITStubArg(regT1, 2);
    emitPutJITStubArg(regT3, 3); // registerOffset
    emitPutJITStubArg(regT2, 5); // argCount
}
// Hot path for op_call_varargs: verifies the callee is a JSFunction (two slow
// cases registered otherwise), speculatively rolls the call frame forward by
// the runtime register offset, and calls through the virtual-call trampoline.
void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCountRegister = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    emitLoad(callee, regT1, regT0);
    emitLoadPayload(argCountRegister, regT2); // argCount
    addPtr(Imm32(registerOffset), regT2, regT3); // registerOffset

    compileOpCallVarargsSetupArgs(instruction);

    // Slow case if the callee is not a cell, or is a cell that is not a
    // JSFunction (checked via the vtable pointer at the start of the cell).
    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    mul32(Imm32(sizeof(Register)), regT3, regT3);
    addPtr(callFrameRegister, regT3);
    storePtr(callFrameRegister, Address(regT3, RegisterFile::CallerFrame * static_cast<int>(sizeof(Register))));
    move(regT3, callFrameRegister);

    move(regT2, regT1); // argCount

    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());

    // Store the call's result into dst.
    emitStore(dst, regT1, regT0);

    sampleCodeBlock(m_codeBlock);
}
// Slow path for op_call_varargs: the callee was not a JSFunction, so dispatch
// through the generic cti_op_call_NotJSFunction stub instead.
void JIT::compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    // NOTE(review): the hot path registers a second slow case (the vptr
    // check); the matching linkSlowCase was dropped in this excerpt and is
    // restored here — verify against upstream.
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.

    map(m_bytecodeIndex + OPCODE_LENGTH(op_call_varargs), dst, regT1, regT0);
    sampleCodeBlock(m_codeBlock);
}
// op_ret: loads the return value into regT1/regT0, restores the caller's
// frame pointer and return address, and returns to the caller.
void JIT::emit_op_ret(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_ret_scopeChain).call();

    emitLoad(dst, regT1, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret(); // NOTE(review): restored — dropped in this excerpt; verify against upstream.
}
// op_construct_verify: checks that the value produced by a constructor is an
// object, registering slow cases otherwise.
void JIT::emit_op_construct_verify(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitLoad(dst, regT1, regT0);
    // Slow case if the result is not a cell...
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
    // ...or is a cell whose Structure says its type is not ObjectType.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
}
// Slow case for op_construct_verify: the constructor did not return an
// object, so the construct result is the original 'this' object (src).
void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    // NOTE(review): the two linkSlowCase(iter) calls matching the two
    // addSlowCase entries on the hot path were dropped in this excerpt;
    // restored below — verify against upstream.
    linkSlowCase(iter);
    linkSlowCase(iter);

    emitLoad(src, regT1, regT0);
    emitStore(dst, regT1, regT0);
}
// Slow path for op_call: delegates to the shared call slow-case compiler,
// consuming the next call-link-info slot.
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}
// Slow path for op_call_eval: delegates to the shared call slow-case
// compiler, consuming the next call-link-info slot.
void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}
// Slow path for op_call_varargs: delegates to the varargs-specific slow-case
// compiler (no call linking for varargs calls).
void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}
// Slow path for op_construct: delegates to the shared call slow-case
// compiler, consuming the next call-link-info slot.
void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}
// Hot path for op_call: delegates to the shared call compiler, consuming the
// next call-link-info slot.
void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}
// Hot path for op_call_eval: delegates to the shared call compiler, consuming
// the next call-link-info slot.
void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}
// op_load_varargs: calls the cti_op_load_varargs stub with the arguments
// offset, then records the stub's int32 result (presumably the argument
// count, per argCountDst — TODO confirm against the stub) in the register
// file.
void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;

    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call(); // NOTE(review): restored — dropped in this excerpt; the store below reads returnValueRegister, so a call must occur here.

    // Stores a naked int32 in the register file.
    store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
}
// Hot path for op_call_varargs: delegates to the varargs call compiler.
void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}
// Hot path for op_construct: delegates to the shared call compiler, consuming
// the next call-link-info slot.
void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}
231 #if !ENABLE(JIT_OPTIMIZE_CALL)
233 /* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
235 void JIT::compileOpCall(OpcodeID opcodeID
, Instruction
* instruction
, unsigned)
237 int dst
= instruction
[1].u
.operand
;
238 int callee
= instruction
[2].u
.operand
;
239 int argCount
= instruction
[3].u
.operand
;
240 int registerOffset
= instruction
[4].u
.operand
;
244 if (opcodeID
== op_call_eval
) {
245 JITStubCall
stubCall(this, cti_op_call_eval
);
246 stubCall
.addArgument(callee
);
247 stubCall
.addArgument(JIT::Imm32(registerOffset
));
248 stubCall
.addArgument(JIT::Imm32(argCount
));
250 wasEval1
= branchTest32(NonZero
, regT0
);
251 wasEval2
= branch32(NotEqual
, regT1
, Imm32(JSValue::CellTag
));
254 emitLoad(callee
, regT1
, regT2
);
256 if (opcodeID
== op_call
)
257 compileOpCallSetupArgs(instruction
);
258 else if (opcodeID
== op_construct
)
259 compileOpConstructSetupArgs(instruction
);
261 emitJumpSlowCaseIfNotJSCell(callee
, regT1
);
262 addSlowCase(branchPtr(NotEqual
, Address(regT2
), ImmPtr(m_globalData
->jsFunctionVPtr
)));
264 // First, in the case of a construct, allocate the new object.
265 if (opcodeID
== op_construct
) {
266 JITStubCall(this, cti_op_construct_JSConstruct
).call(registerOffset
- RegisterFile::CallFrameHeaderSize
- argCount
);
267 emitLoad(callee
, regT1
, regT2
);
270 // Speculatively roll the callframe, assuming argCount will match the arity.
271 storePtr(callFrameRegister
, Address(callFrameRegister
, (RegisterFile::CallerFrame
+ registerOffset
) * static_cast<int>(sizeof(Register
))));
272 addPtr(Imm32(registerOffset
* static_cast<int>(sizeof(Register
))), callFrameRegister
);
273 move(Imm32(argCount
), regT1
);
275 emitNakedCall(m_globalData
->jitStubs
.ctiVirtualCall());
277 if (opcodeID
== op_call_eval
) {
282 emitStore(dst
, regT1
, regT0
);;
284 sampleCodeBlock(m_codeBlock
);
// Slow path companion to the unlinked compileOpCall above: the callee was not
// a JSFunction, so dispatch through the generic NotJSFunction /
// NotJSConstruct stub instead.
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    // NOTE(review): restored — matches the vptr addSlowCase on the hot path;
    // this line was dropped in this excerpt. Verify against upstream.
    linkSlowCase(iter);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.

    sampleCodeBlock(m_codeBlock);
}
301 #else // !ENABLE(JIT_OPTIMIZE_CALL)
303 /* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
// Hot path for op_call / op_call_eval / op_construct with call linking
// enabled (ENABLE(JIT_OPTIMIZE_CALL)). Plants a patchable compare against a
// cached callee so the call can later be linked directly, initializes the new
// call frame inline, and records the patch locations in
// m_callStructureStubCompilationInfo[callLinkInfoIndex].
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    // NOTE(review): these declarations and the link(this) calls near the end
    // were dropped in this excerpt; restored — verify against upstream.
    Jump wasEval1;
    Jump wasEval2;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call(); // NOTE(review): restored — dropped in this excerpt.
        // A non-empty result means the eval was handled; skip the call below.
        wasEval1 = branchTest32(NonZero, regT0);
        wasEval2 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    }

    emitLoad(callee, regT1, regT0);

    // Patchable check against the (initially null) cached linked callee.
    DataLabelPtr addressOfLinkedFunctionCheck;
    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, ImmPtr(0));
    addSlowCase(jumpToSlow);
    ASSERT(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow) == patchOffsetOpCallCompareToJump);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;

    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));

    // The following is the fast case, only used when a callee can be linked.

    // In the case of OpConstruct, call out to a cti_ function to create the new object.
    if (opcodeID == op_construct) {
        int proto = instruction[5].u.operand;
        int thisRegister = instruction[6].u.operand;

        JITStubCall stubCall(this, cti_op_construct_JSConstruct);
        stubCall.addArgument(regT1, regT0);
        stubCall.addArgument(Imm32(0)); // FIXME: Remove this unused JITStub argument.
        stubCall.addArgument(Imm32(0)); // FIXME: Remove this unused JITStub argument.
        stubCall.addArgument(proto);
        stubCall.call(thisRegister);

        emitLoad(callee, regT1, regT0);
    }

    // Fast version of stack frame initialization, directly relative to edi.
    // Note that this omits to set up RegisterFile::CodeBlock, which is set in the callee
    emitStore(registerOffset + RegisterFile::OptionalCalleeArguments, JSValue());
    emitStore(registerOffset + RegisterFile::Callee, regT1, regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // newScopeChain
    store32(Imm32(argCount), Address(callFrameRegister, (registerOffset + RegisterFile::ArgumentCount) * static_cast<int>(sizeof(Register))));
    storePtr(callFrameRegister, Address(callFrameRegister, (registerOffset + RegisterFile::CallerFrame) * static_cast<int>(sizeof(Register))));
    storePtr(regT1, Address(callFrameRegister, (registerOffset + RegisterFile::ScopeChain) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval) {
        wasEval1.link(this); // NOTE(review): restored — dropped in this excerpt.
        wasEval2.link(this);
    }

    // Put the return value in dst. In the interpreter, op_ret does this.
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + opcodeLengths[opcodeID], dst, regT1, regT0);

    sampleCodeBlock(m_codeBlock);
}
377 void JIT::compileOpCallSlowCase(Instruction
* instruction
, Vector
<SlowCaseEntry
>::iterator
& iter
, unsigned callLinkInfoIndex
, OpcodeID opcodeID
)
379 int dst
= instruction
[1].u
.operand
;
380 int callee
= instruction
[2].u
.operand
;
381 int argCount
= instruction
[3].u
.operand
;
382 int registerOffset
= instruction
[4].u
.operand
;
387 // The arguments have been set up on the hot path for op_call_eval
388 if (opcodeID
== op_call
)
389 compileOpCallSetupArgs(instruction
);
390 else if (opcodeID
== op_construct
)
391 compileOpConstructSetupArgs(instruction
);
393 // Fast check for JS function.
394 Jump callLinkFailNotObject
= branch32(NotEqual
, regT1
, Imm32(JSValue::CellTag
));
395 Jump callLinkFailNotJSFunction
= branchPtr(NotEqual
, Address(regT0
), ImmPtr(m_globalData
->jsFunctionVPtr
));
397 // First, in the case of a construct, allocate the new object.
398 if (opcodeID
== op_construct
) {
399 JITStubCall(this, cti_op_construct_JSConstruct
).call(registerOffset
- RegisterFile::CallFrameHeaderSize
- argCount
);
400 emitLoad(callee
, regT1
, regT0
);
403 // Speculatively roll the callframe, assuming argCount will match the arity.
404 storePtr(callFrameRegister
, Address(callFrameRegister
, (RegisterFile::CallerFrame
+ registerOffset
) * static_cast<int>(sizeof(Register
))));
405 addPtr(Imm32(registerOffset
* static_cast<int>(sizeof(Register
))), callFrameRegister
);
406 move(Imm32(argCount
), regT1
);
408 m_callStructureStubCompilationInfo
[callLinkInfoIndex
].callReturnLocation
= emitNakedCall(m_globalData
->jitStubs
.ctiVirtualCallPreLink());
410 // Put the return value in dst.
411 emitStore(dst
, regT1
, regT0
);;
412 sampleCodeBlock(m_codeBlock
);
414 // If not, we need an extra case in the if below!
415 ASSERT(OPCODE_LENGTH(op_call
) == OPCODE_LENGTH(op_call_eval
));
417 // Done! - return back to the hot path.
418 if (opcodeID
== op_construct
)
419 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_construct
));
421 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call
));
423 // This handles host functions
424 callLinkFailNotObject
.link(this);
425 callLinkFailNotJSFunction
.link(this);
426 JITStubCall(this, opcodeID
== op_construct
? cti_op_construct_NotJSConstruct
: cti_op_call_NotJSFunction
).call();
428 emitStore(dst
, regT1
, regT0
);;
429 sampleCodeBlock(m_codeBlock
);
432 /* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
434 #endif // !ENABLE(JIT_OPTIMIZE_CALL)
436 #else // USE(JSVALUE32_64)
// JSVALUE64 flavour of the call-frame initializer.
// On entry: regT2 holds the callee (shown to be a JSFunction* by the field
// offsets used below), regT1 holds the argument count. Clobbers regT1.
void JIT::compileOpCallInitializeCallFrame()
{
    store32(regT1, Address(callFrameRegister, RegisterFile::ArgumentCount * static_cast<int>(sizeof(Register))));

    // Fetch the callee's scope chain node out of the JSFunction object.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // newScopeChain

    // Clear the lazily-created 'arguments' slot, then record callee and scope chain.
    storePtr(ImmPtr(JSValue::encode(JSValue())), Address(callFrameRegister, RegisterFile::OptionalCalleeArguments * static_cast<int>(sizeof(Register))));
    storePtr(regT2, Address(callFrameRegister, RegisterFile::Callee * static_cast<int>(sizeof(Register))));
    storePtr(regT1, Address(callFrameRegister, RegisterFile::ScopeChain * static_cast<int>(sizeof(Register))));
}
// JSVALUE64 stub-argument setup for op_call: the callee value in regT2
// (slot 1), plus the registerOffset (slot 2) and argCount (slot 3) operands.
void JIT::compileOpCallSetupArgs(Instruction* instruction)
{
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    emitPutJITStubArg(regT2, 1);
    emitPutJITStubArgConstant(argCount, 3);
    emitPutJITStubArgConstant(registerOffset, 2);
}
// JSVALUE64 stub-argument setup for op_call_varargs: callee in regT2, the
// runtime argCount from regT1, and the rolled register offset computed into
// regT0 (regT1 + static registerOffset operand).
void JIT::compileOpCallVarargsSetupArgs(Instruction* instruction)
{
    int registerOffset = instruction[4].u.operand;

    emitPutJITStubArg(regT2, 1);
    emitPutJITStubArg(regT1, 3);
    addPtr(Imm32(registerOffset), regT1, regT0);
    emitPutJITStubArg(regT0, 2);
}
// JSVALUE64 stub-argument setup for op_construct: callee in regT2, plus the
// registerOffset, argCount, prototype (spilled via regT0) and 'this' register
// operands of the bytecode.
void JIT::compileOpConstructSetupArgs(Instruction* instruction)
{
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;
    int proto = instruction[5].u.operand;
    int thisRegister = instruction[6].u.operand;

    emitPutJITStubArg(regT2, 1);
    emitPutJITStubArgConstant(registerOffset, 2);
    emitPutJITStubArgConstant(argCount, 3);
    emitPutJITStubArgFromVirtualRegister(proto, 4, regT0);
    emitPutJITStubArgConstant(thisRegister, 5);
}
// JSVALUE64 hot path for op_call_varargs: verifies the callee is a JSFunction
// (two slow cases otherwise), speculatively rolls the call frame by the
// runtime offset, and calls through the virtual-call trampoline.
void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCountRegister = instruction[3].u.operand;

    emitGetVirtualRegister(argCountRegister, regT1);
    emitGetVirtualRegister(callee, regT2);
    compileOpCallVarargsSetupArgs(instruction);

    // Check for JSFunctions.
    emitJumpSlowCaseIfNotJSCell(regT2);
    addSlowCase(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    intptr_t offset = (intptr_t)sizeof(Register) * (intptr_t)RegisterFile::CallerFrame;
    addPtr(Imm32((int32_t)offset), regT0, regT3);
    addPtr(callFrameRegister, regT3);
    storePtr(callFrameRegister, regT3);
    addPtr(regT0, callFrameRegister);
    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());

    // Put the return value in dst. In the interpreter, op_ret does this.
    emitPutVirtualRegister(dst);

    sampleCodeBlock(m_codeBlock);
}
// JSVALUE64 slow path for op_call_varargs: the callee was not a JSFunction,
// so dispatch through the generic cti_op_call_NotJSFunction stub.
void JIT::compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = instruction[1].u.operand;

    // NOTE(review): the two linkSlowCase(iter) calls matching the two
    // addSlowCase entries on the hot path were dropped in this excerpt;
    // restored below — verify against upstream.
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.

    sampleCodeBlock(m_codeBlock);
}
527 #if !ENABLE(JIT_OPTIMIZE_CALL)
529 /* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
// JSVALUE64 hot path for op_call / op_call_eval / op_construct when call
// linking is disabled. Checks the callee is a JSFunction (two slow cases
// otherwise), speculatively rolls the call frame, and calls the virtual-call
// trampoline. For op_call_eval, first tries the eval stub and skips the call
// machinery when it produced a result.
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    // NOTE(review): this declaration and the wasEval.link(this) below were
    // dropped in this excerpt; restored — verify against upstream.
    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee, regT2);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call(); // NOTE(review): restored — dropped in this excerpt.
        // A non-empty result means the eval was handled; skip the call below.
        wasEval = branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue())));
    }

    emitGetVirtualRegister(callee, regT2);
    // The arguments have been set up on the hot path for op_call_eval
    if (opcodeID == op_call)
        compileOpCallSetupArgs(instruction);
    else if (opcodeID == op_construct)
        compileOpConstructSetupArgs(instruction);

    // Check for JSFunctions.
    emitJumpSlowCaseIfNotJSCell(regT2);
    addSlowCase(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsFunctionVPtr)));

    // First, in the case of a construct, allocate the new object.
    if (opcodeID == op_construct) {
        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
        emitGetVirtualRegister(callee, regT2);
    }

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());

    if (opcodeID == op_call_eval)
        wasEval.link(this); // NOTE(review): restored — dropped in this excerpt; without it the bare 'if' above would swallow the next statement.

    // Put the return value in dst. In the interpreter, op_ret does this.
    emitPutVirtualRegister(dst);

    sampleCodeBlock(m_codeBlock);
}
// JSVALUE64 slow path companion to the unlinked compileOpCall above: the
// callee was not a JSFunction, so dispatch through the generic
// NotJSFunction / NotJSConstruct stub instead.
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    int dst = instruction[1].u.operand;

    // NOTE(review): the two linkSlowCase(iter) calls matching the two
    // addSlowCase entries on the hot path were dropped in this excerpt;
    // restored below — verify against upstream.
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.

    sampleCodeBlock(m_codeBlock);
}
594 #else // !ENABLE(JIT_OPTIMIZE_CALL)
596 /* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
// JSVALUE64 hot path for op_call / op_call_eval / op_construct with call
// linking enabled. Plants a patchable compare against a cached callee so the
// call can later be linked directly, initializes the new call frame inline,
// and records the patch locations in
// m_callStructureStubCompilationInfo[callLinkInfoIndex].
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    // NOTE(review): this declaration and the wasEval.link(this) below were
    // dropped in this excerpt; restored — verify against upstream.
    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee, regT2);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call(); // NOTE(review): restored — dropped in this excerpt.
        // A non-empty result means the eval was handled; skip the call below.
        wasEval = branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue())));
    }

    // This plants a check for a cached JSFunction value, so we can plant a fast link to the callee.
    // This deliberately leaves the callee in ecx, used when setting up the stack frame below
    emitGetVirtualRegister(callee, regT2);
    DataLabelPtr addressOfLinkedFunctionCheck;
    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT2, addressOfLinkedFunctionCheck, ImmPtr(JSValue::encode(JSValue())));
    addSlowCase(jumpToSlow);
    ASSERT(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow) == patchOffsetOpCallCompareToJump);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;

    // The following is the fast case, only used when a callee can be linked.

    // In the case of OpConstruct, call out to a cti_ function to create the new object.
    if (opcodeID == op_construct) {
        int proto = instruction[5].u.operand;
        int thisRegister = instruction[6].u.operand;

        emitPutJITStubArg(regT2, 1);
        emitPutJITStubArgFromVirtualRegister(proto, 4, regT0);
        JITStubCall stubCall(this, cti_op_construct_JSConstruct);
        stubCall.call(thisRegister);
        emitGetVirtualRegister(callee, regT2);
    }

    // Fast version of stack frame initialization, directly relative to edi.
    // Note that this omits to set up RegisterFile::CodeBlock, which is set in the callee
    storePtr(ImmPtr(JSValue::encode(JSValue())), Address(callFrameRegister, (registerOffset + RegisterFile::OptionalCalleeArguments) * static_cast<int>(sizeof(Register))));
    storePtr(regT2, Address(callFrameRegister, (registerOffset + RegisterFile::Callee) * static_cast<int>(sizeof(Register))));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // newScopeChain
    store32(Imm32(argCount), Address(callFrameRegister, (registerOffset + RegisterFile::ArgumentCount) * static_cast<int>(sizeof(Register))));
    storePtr(callFrameRegister, Address(callFrameRegister, (registerOffset + RegisterFile::CallerFrame) * static_cast<int>(sizeof(Register))));
    storePtr(regT1, Address(callFrameRegister, (registerOffset + RegisterFile::ScopeChain) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval)
        wasEval.link(this); // NOTE(review): restored — dropped in this excerpt; without it the bare 'if' above would swallow the next statement.

    // Put the return value in dst. In the interpreter, op_ret does this.
    emitPutVirtualRegister(dst);

    sampleCodeBlock(m_codeBlock);
}
// JSVALUE64 slow path companion to the linked compileOpCall above: performs
// the not-yet-linked call through ctiVirtualCallPreLink (which may link the
// call site), or falls through to the generic NotJSFunction /
// NotJSConstruct stub for host functions and non-function callees.
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    // NOTE(review): restored — matches the single addSlowCase (the patchable
    // linked-function check) on the hot path; dropped in this excerpt.
    linkSlowCase(iter);

    // The arguments have been set up on the hot path for op_call_eval
    if (opcodeID == op_call)
        compileOpCallSetupArgs(instruction);
    else if (opcodeID == op_construct)
        compileOpConstructSetupArgs(instruction);

    // Fast check for JS function.
    Jump callLinkFailNotObject = emitJumpIfNotJSCell(regT2);
    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsFunctionVPtr));

    // First, in the case of a construct, allocate the new object.
    if (opcodeID == op_construct) {
        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
        emitGetVirtualRegister(callee, regT2);
    }

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_globalData->jitStubs.ctiVirtualCallPreLink());

    // Put the return value in dst.
    emitPutVirtualRegister(dst);
    sampleCodeBlock(m_codeBlock);

    // If not, we need an extra case in the if below!
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));

    // Done! - return back to the hot path.
    if (opcodeID == op_construct)
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_construct));
    else // NOTE(review): 'else' restored — dropped in this excerpt; without it both jumps would be emitted unconditionally.
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions
    callLinkFailNotObject.link(this);
    callLinkFailNotJSFunction.link(this);
    JITStubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction).call();

    emitPutVirtualRegister(dst);
    sampleCodeBlock(m_codeBlock);
}
715 /* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
717 #endif // !ENABLE(JIT_OPTIMIZE_CALL)
719 #endif // USE(JSVALUE32_64)
723 #endif // ENABLE(JIT)