/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "Arguments.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure* trampolines)
{
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (2) The second function provides fast property access for string length
    Label stringLengthBegin = align();

    // Check eax is a string
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);

    Jump string_failureCases3 = branch32(LessThan, regT0, TrustedImm32(0));

    // regT0 contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
    emitFastArithIntToImmNoCheck(regT0, regT0);

    ret();
#endif
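
    // On the re-tag above: emitFastArithIntToImmNoCheck presumably just boxes the raw
    // 32-bit length as an immediate integer (with JSVALUE64, or-ing in the number tag);
    // the LessThan check has already diverted any length with the sign bit set to the
    // failure cases, so no further overflow check is needed here.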

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);

    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    JumpList callLinkFailures;
    Label virtualCallLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualConstructLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock4.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    // If the parser fails we want to be able to keep going,
    // so we handle this as a parse failure.
    callLinkFailures.link(this);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT1);
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
    ret();
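
    // Note on the failure path above: the first poke appears to skip stack slot 0 (the
    // return-address slot), which the second poke then fills with ctiVMThrowTrampoline,
    // so the final ret() dispatches into the throw trampoline with the call frame already
    // stored where the stub expects it.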

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, m_globalData->executableAllocator);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif

#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
#endif
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
#endif
}

JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86_64)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(edi, esi, edx, ecx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::edi);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.
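
    // Why 16 - sizeof(void*): assuming the SysV AMD64 ABI, rsp must be 16-byte aligned at
    // the point of a call; the call into this trampoline pushed an 8-byte return address,
    // leaving rsp at 8 (mod 16), so subtracting another 8 bytes restores the alignment
    // the host function expects.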

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
    loadPtr(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_executable)), X86Registers::r9);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(X86Registers::r9, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    move(callFrameRegister, MIPSRegisters::a0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif

    // Check for an exception
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Return.
    ret();

    // Handle an exception
    exceptionHandler.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}

JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool>, JSGlobalData* globalData, NativeFunction)
{
    return globalData->jitStubs->ctiNativeCall();
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src)) {
        storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
        if (dst == m_lastResultBytecodeRegister)
            killLastResultRegister();
    } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
        // If either the src or dst is the cached register go through
        // get/put registers to make sure we track this correctly.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        // Perform the copy via regT1; do not disturb any mapping in regT0.
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
        storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
    }
}
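
// Sketch of the caching contract assumed above: m_lastResultBytecodeRegister records which
// virtual register regT0 currently mirrors, so back-to-back ops can skip a reload. op_mov
// must therefore either refresh that mapping (the get/put path), leave regT0 untouched
// entirely (the regT1 copy path), or kill the mapping when overwriting the cached register
// with a constant.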

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        int32_t op2imm = getConstantOperandImmediateInt(op2);
        addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThanOrEqual, regT0, regT1), target);
    }
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(baseVal, regT0);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(branch8(NotEqual, Address(regT3, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));

    // FIXME: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(true))), regT0);

    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}
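
// A sketch of what the fast path above computes, ignoring the masquerading and HasInstance
// hooks that the slow cases handle:
//
//     JSValue v = value;
//     while (v.isCell()) {
//         v = v.asCell()->structure()->storedPrototype();
//         if (v == proto)
//             return true;
//     }
//     return false;
//
// (storedPrototype() stands in here for whatever accessor Structure::prototypeOffset() reads.)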

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    loadPtr(&globalObject->m_registers, regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    loadPtr(&globalObject->m_registers, regT0);
    storePtr(regT1, Address(regT0, currentInstruction[1].u.operand * sizeof(Register)));
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);

    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT1);
    storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    Jump activationCreated = branchTestPtr(NonZero, addressFor(activation));
    Jump argumentsNotCreated = branchTestPtr(Zero, addressFor(arguments));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.call();
    argumentsNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branchTestPtr(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(dst))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst), regT2);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
{
    void* globalObject = m_codeBlock->globalObject();
    unsigned currentIndex = m_globalResolveInfoIndex++;
    GlobalResolveInfo* resolveInfoAddress = &(m_codeBlock->globalResolveInfo(currentIndex));

    // Check Structure of global object
    move(TrustedImmPtr(globalObject), regT0);
    move(TrustedImmPtr(resolveInfoAddress), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset()))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT0);
    load32(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
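
// Worked example of the xor trick above, assuming the usual JSVALUE64 immediates
// (ValueFalse == 0x06, ValueTrue == 0x07):
//     false: 0x06 ^ 0x06 == 0x00; passes the ~1 test; 0x00 ^ 0x07 == 0x07 == true
//     true:  0x07 ^ 0x06 == 0x01; passes the ~1 test; 0x01 ^ 0x07 == 0x06 == false
// Any non-boolean input leaves bits set outside the low bit after the first xor, so it
// fails the branchTestPtr and falls through to the slow case.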

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}
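
// The fast path above leans on the JSVALUE64 encoding: jsNumber(0) has a single immediate
// representation, so once it has been ruled out every other immediate integer is non-zero
// and hence truthy, which is why isNonZero can skip straight past the boolean checks.
// Doubles, strings, and objects miss the jsBoolean comparisons and take the slow case.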

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue(ptr)))), target);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
    killLastResultRegister();
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    killLastResultRegister();
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitnot(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitJumpSlowCaseIfNotImmediateInteger(regT0);
    not32(regT0);
    emitFastArithIntToImmNoCheck(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    storePtr(tagTypeNumberRegister, payloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}
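
// The fast path above validates the iterator's cache: if base's Structure still equals
// m_cachedStructure and every Structure recorded in m_cachedPrototypeChain is unchanged,
// the cached property name must still be live and iteration simply continues; any mismatch
// falls back to cti_has_property to re-validate the key.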

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));
    addSlowCase(emitJumpIfImmediateNumber(regT2));

    if (type == OpStrictEq)
        compare32(Equal, regT1, regT0, regT0);
    else
        compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}
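
// Why a single or-combined test suffices above (assuming JSVALUE64 tagging): cells have
// all tag bits clear, so the OR of the two values only looks like a cell when both
// operands are cells; conversely the number tag bits are set whenever either operand is
// numeric. With cells and numbers routed to the slow case, the remaining immediates
// compare correctly with a plain 32-bit equality check.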

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, globalData) / sizeof(void*));
    loadPtr(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)), regT0);
    storePtr(TrustedImmPtr(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
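
// cti_op_switch_imm presumably consults the jump table populated above and returns the
// native-code address of the matching case (or the default offset) in regT0; the indirect
// jump(regT0) then completes the dispatch. The same shape repeats for the character and
// string switches below.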

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw_reference_error);
    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    test8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    comparePtr(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
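
// Worked example of the immediate path above, assuming the usual JSVALUE64 immediates
// (jsNull() == 0x02, jsUndefined() == 0x0a, TagBitUndefined == 0x08): masking off
// TagBitUndefined maps both null and undefined to ValueNull (0x02), so a single comparePtr
// treats them alike, matching the loose-equality rule that null == undefined.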

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    test8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    comparePtr(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    emitPutVirtualRegister(dst);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    storePtr(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    addSlowCase(branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
}

void JIT::emit_op_convert_this_strict(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    Jump notNull = branchTestPtr(NonZero, regT0);
    move(TrustedImmPtr(JSValue::encode(jsNull())), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
    Jump setThis = jump();
    notNull.link(this);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump notAnObject = branch8(NotEqual, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
    addSlowCase(branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
    isImmediate.link(this);
    notAnObject.link(this);
    setThis.link(this);
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
    emitPutVirtualRegister(result);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(currentInstruction[2].u.operand, regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_convert_this_strict(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this_strict);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(currentInstruction[2].u.operand, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    }
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call();
}
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

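// op_call, op_call_eval, and op_construct share compileOpCallSlowCase, each
// consuming one CallLinkInfo entry (m_callLinkInfoIndex++) so the slow path
// can link the call site once the callee is known; op_call_varargs has its
// own variant and no link-info entry.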
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

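// Fast path for op_get_arguments_length: as long as no arguments object has
// been materialized (the arguments register is still empty), the length is
// just the call frame's ArgumentCount minus one for 'this', re-tagged as an
// integer immediate. An existing arguments object forces the slow path.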
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}

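// op_get_argument_by_val reads arguments[i] straight off the stack while no
// arguments object exists. Declared parameters live in place just below the
// call frame header; if the caller supplied more arguments than the callee
// declared, they sit in a separate out-of-line run, hence the two load paths
// below.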
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    Jump skipOutofLineParams;
    int numArgs = m_codeBlock->m_numParameters;
    if (numArgs) {
        Jump notInInPlaceArgs = branch32(AboveOrEqual, regT1, Imm32(numArgs));
        addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
        loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
        skipOutofLineParams = jump();
        notInInPlaceArgs.link(this);
    }

    addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
    mul32(TrustedImm32(sizeof(Register)), regT2, regT2);
    subPtr(regT2, regT0);
    loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
    if (numArgs)
        skipOutofLineParams.link(this);
    emitPutVirtualRegister(dst, regT0);
}

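// Slow path for op_get_argument_by_val: when the arguments object already
// exists we jump straight to the generic get_by_val stub; the index and
// bounds failures first create the arguments object (using the no-params
// stub when only 'this' is expected) and then fall through to the same stub
// call.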
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}

#endif // USE(JSVALUE64)

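// op_resolve_global_dynamic guards a cached global lookup against scopes
// injected at run time: it walks 'skip' nodes of the scope chain and adds a
// slow case whenever a skipped node is not the expected activation, since a
// dynamically added scope (e.g. from eval) could shadow the global property.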
void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
{
    int skip = currentInstruction[5].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);

    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
    }
    emit_op_resolve_global(currentInstruction, true);
}

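// Slow path: each failed structure check resolves the name fully dynamically
// via cti_op_resolve and jumps past the whole opcode; the final slow case
// means every skip check passed but the global cache itself missed, so
// cti_op_resolve_global is called to repopulate it.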
void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
    int skip = currentInstruction[5].u.operand;
    while (skip--) {
        linkSlowCase(iter);
        JITStubCall resolveStubCall(this, cti_op_resolve);
        resolveStubCall.addArgument(TrustedImmPtr(ident));
        resolveStubCall.call(dst);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));
    }
    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.call(dst);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

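// op_load_varargs copies the caller's arguments into a fresh stretch of the
// register file for an f.apply-style call. The inline copy only runs when
// the callee declares no formal parameters (expectedParams == 0), the one
// case where the arguments are guaranteed to form a single contiguous
// stream; otherwise the stub performs the copy. The inline path bounds-
// checks the register file, then walks the count in regT0 down to 1, moving
// one Register per iteration (a count of 1 means only 'this', which skips
// the loop).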
void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;
    int registerOffset = currentInstruction[3].u.operand;
    ASSERT(argsOffset <= registerOffset);

    int expectedParams = m_codeBlock->m_numParameters - 1;
    // Don't do inline copying if we aren't guaranteed to have a single stream
    // of arguments
    if (expectedParams) {
        JITStubCall stubCall(this, cti_op_load_varargs);
        stubCall.addArgument(Imm32(argsOffset));
        stubCall.call();
        // Stores a naked int32 in the register file.
        store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
        return;
    }

#if USE(JSVALUE32_64)
    addSlowCase(branch32(NotEqual, tagFor(argsOffset), TrustedImm32(JSValue::EmptyValueTag)));
#else
    addSlowCase(branchTestPtr(NonZero, addressFor(argsOffset)));
#endif
    // Load arg count into regT0
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
    store32(regT0, intPayloadFor(argCountDst));
    Jump endBranch = branch32(Equal, regT0, TrustedImm32(1));

    mul32(TrustedImm32(sizeof(Register)), regT0, regT3);
    addPtr(TrustedImm32(static_cast<unsigned>(sizeof(Register) - RegisterFile::CallFrameHeaderSize * sizeof(Register))), callFrameRegister, regT1);
    subPtr(regT3, regT1); // regT1 is now the start of the out of line arguments
    addPtr(Imm32(argsOffset * sizeof(Register)), callFrameRegister, regT2); // regT2 is the target buffer

    // Bounds check the registerfile
    addPtr(regT2, regT3);
    addPtr(Imm32((registerOffset - argsOffset) * sizeof(Register)), regT3);
    addSlowCase(branchPtr(Below, AbsoluteAddress(m_globalData->interpreter->registerFile().addressOfEnd()), regT3));

    sub32(TrustedImm32(1), regT0);
    Label loopStart = label();
    loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(0 - 2 * sizeof(Register))), regT3);
    storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(0 - sizeof(Register))));
#if USE(JSVALUE32_64)
    loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - 2 * sizeof(Register))), regT3);
    storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - sizeof(Register))));
#endif
    branchSubPtr(NonZero, TrustedImm32(1), regT0).linkTo(loopStart, this);
    endBranch.link(this);
}

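// The slow path only exists for the inline (expectedParams == 0) case; it
// redoes the copy through the stub and records the returned argument count
// as a naked int32, mirroring the fast path's bookkeeping.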
void JIT::emitSlow_op_load_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;
    int expectedParams = m_codeBlock->m_numParameters - 1;
    if (expectedParams)
        return;

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();

    store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
    store32(returnValueRegister, intPayloadFor(argCountDst));
}

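// op_new_func: when operand 3 is set, this appears to guard against
// re-creating a function whose register was already populated lazily, so
// the stub call is skipped if the destination register is non-empty.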
void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTestPtr(NonZero, addressFor(dst));
#endif
    }

    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);

    if (currentInstruction[3].u.operand)
        lazyJump.link(this);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_buffer);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

} // namespace JSC

#endif // ENABLE(JIT)