/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

#if !USE(JSVALUE32_64)

#define RECORD_JUMP_TARGET(targetOffset) \
   do { m_labels[m_bytecodeIndex + (targetOffset)].used(); } while (false)
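
// Everything below this point is compiled only for the single-word JSValue
// encodings; the JSVALUE32_64 versions of these opcodes are compiled
// separately. RECORD_JUMP_TARGET marks the label at
// m_bytecodeIndex + targetOffset as used, so the JIT keeps a patchable label
// for every bytecode offset that a jump may target.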

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure *trampolines)
{
#if ENABLE(JIT_OPTIMIZE_MOD)
    Label softModBegin = align();
    softModulo();
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (2) The second function provides fast property access for string length
    Label stringLengthBegin = align();

    // Check eax is a string
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);

    Jump string_failureCases3 = branch32(Above, regT0, Imm32(JSImmediate::maxImmediateInt));

    // regT0 contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
    emitFastArithIntToImmNoCheck(regT0, regT0);

    ret();
#endif

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);
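
    // The two virtual call trampolines below are shared by all JS calls. The
    // "link" variant calls out to cti_vm_lazyLinkCall so that the first call
    // through a link record can be bound directly to the callee's code; the
    // plain variant dispatches through FunctionExecutable::m_jitCode on every
    // call. Both compile the callee on demand (cti_op_call_JSFunction) and
    // fix up mismatched argument counts (cti_op_call_arityCheck).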

    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction2 = call();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    hasCodeBlock2.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck2 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    arityCheckOkay2.link(this);

    isNativeFunc2.link(this);

    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction1 = call();
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck1 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    arityCheckOkay3.link(this);

    isNativeFunc3.link(this);

    compileOpCallInitializeCallFrame();
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
    jump(regT0);

    Label nativeCallThunk = align();
    preserveReturnAddressAfterCall(regT0);
    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address

    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
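
    // The native call sequence below is duplicated once per target ABI: each
    // branch marshals the callee, 'this' value and an ArgList into the
    // registers and stack slots the host function expects, calls through the
    // entry point stored in JSFunction::m_data, and then rebalances the stack
    // after the call.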

#if CPU(X86_64)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, X86Registers::ecx);

    // Allocate stack space for our arglist
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
    COMPILE_ASSERT((sizeof(ArgList) & 0xf) == 0, ArgList_should_by_16byte_aligned);

    // Set up arguments
    subPtr(Imm32(1), X86Registers::ecx); // Don't include 'this' in argcount

    // Push argcount
    storePtr(X86Registers::ecx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in edx
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), callFrameRegister, X86Registers::edx);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (ecx)
    mul32(Imm32(sizeof(Register)), X86Registers::ecx, X86Registers::ecx);
    subPtr(X86Registers::ecx, X86Registers::edx);

    // push pointer to arguments
    storePtr(X86Registers::edx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference so is stackPointerRegister
    move(stackPointerRegister, X86Registers::ecx);

    // edx currently points to the first argument, edx-sizeof(Register) points to 'this'
    loadPtr(Address(X86Registers::edx, -(int32_t)sizeof(Register)), X86Registers::edx);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);

    move(callFrameRegister, X86Registers::edi);

    call(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_data)));

    addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
#elif CPU(X86)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    /* We have two structs that we use to describe the stackframe we set up for our
     * call to native code. NativeCallFrameStructure describes how we set up the stack
     * in advance of the call. NativeFunctionCalleeSignature describes the callframe
     * as the native code expects it. We do this as we are using the fastcall calling
     * convention which results in the callee popping its arguments off the stack, but
     * not the rest of the callframe so we need a nice way to ensure we increment the
     * stack pointer by the right amount after the call.
     */
#if COMPILER(MSVC) || OS(LINUX)
    struct NativeCallFrameStructure {
        // CallFrame* callFrame; // passed in EDX
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
        JSValue result;
    };
    struct NativeFunctionCalleeSignature {
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
    };
#else
    struct NativeCallFrameStructure {
        // CallFrame* callFrame; // passed in ECX
        // JSObject* callee; // passed in EDX
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
    };
    struct NativeFunctionCalleeSignature {
        JSValue thisValue;
        ArgList* argPointer;
    };
#endif
    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
    // Allocate system stack frame
    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));

    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int)sizeof(Register)), regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue)));

#if COMPILER(MSVC) || OS(LINUX)
    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);

    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
    storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));

    // Plant callframe
    move(callFrameRegister, X86Registers::edx);

    call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));

    // JSValue is a non-POD type
    loadPtr(Address(X86Registers::eax), X86Registers::eax);
#else
    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx);

    // Plant callframe
    move(callFrameRegister, X86Registers::ecx);
    call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
#endif

    // We've put a few temporaries on the stack in addition to the actual arguments
    // so pull them off now
    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);
#elif CPU(ARM)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    // Allocate stack space for our arglist
    COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0, ArgList_should_by_8byte_aligned);
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    move(callFrameRegister, regT1);
    sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);

    // push pointer to arguments
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

#if OS(WINCE)
    // Setup arg3: regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int32_t)sizeof(Register)), ARMRegisters::r3);

    // Setup arg2:
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);

    // Setup arg1:
    move(callFrameRegister, regT1);

    // Setup arg0: the return value is written through a hidden pointer, so
    // reserve a Register-sized slot and pass its address.
    move(stackPointerRegister, regT0);
    subPtr(Imm32(sizeof(Register)), stackPointerRegister);
    storePtr(regT0, Address(stackPointerRegister));

    call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));

    // Load the return value.
    loadPtr(Address(regT0), regT0);

    addPtr(Imm32(sizeof(Register) + sizeof(ArgList)), stackPointerRegister);
#else
    // Setup arg3: regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int32_t)sizeof(Register)), regT2);

    // Setup arg2:
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);

    // Setup arg1:
    move(callFrameRegister, regT0);

    // Setup arg4: This is a plain hack
    move(stackPointerRegister, ARMRegisters::r3);

    call(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_data)));

    addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
#endif
#elif CPU(MIPS)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    // Allocate stack space for our arglist
    COMPILE_ASSERT(!(sizeof(ArgList) & 0x7), ArgList_should_by_8byte_aligned);
    subPtr(Imm32(sizeof(ArgList) + 24), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount to 24 + offset($sp)
    storePtr(regT0, Address(stackPointerRegister, 24 + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    move(callFrameRegister, regT1);
    sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);

    // push pointer to arguments to 24 + offset($sp)
    storePtr(regT1, Address(stackPointerRegister, 24 + OBJECT_OFFSETOF(ArgList, m_args)));

    // Setup arg3: regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int32_t)sizeof(Register)), MIPSRegisters::a3);

    // Setup arg2:
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);

    // Setup arg1:
    move(callFrameRegister, MIPSRegisters::a1);

    // Setup arg4: ArgList is passed by reference. At 16($sp), store ($sp + 24)
    addPtr(Imm32(24), stackPointerRegister, regT2);
    storePtr(regT2, Address(stackPointerRegister, 16));

    // Setup arg0 as 20($sp) to hold the returned structure.
    ASSERT(sizeof(JSValue) == 4);
    addPtr(Imm32(20), stackPointerRegister, MIPSRegisters::a0);

    call(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_data)));

    // Get returned value from 0($v0) which is the same as 20($sp)
    loadPtr(Address(returnValueRegister, 0), returnValueRegister);

    // Restore stack space
    addPtr(Imm32(sizeof(ArgList) + 24), stackPointerRegister);
#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif

    // Check for an exception
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Handle an exception
    exceptionHandler.link(this);
    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    restoreReturnAddressBeforeReturn(regT2);
    ret();

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
#endif

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
    trampolines->ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
    trampolines->ctiNativeCallThunk = adoptRef(new NativeExecutable(JITCode(JITCode::HostFunction(trampolineAt(finalCode, nativeCallThunk)))));
#if ENABLE(JIT_OPTIMIZE_MOD)
    trampolines->ctiSoftModulo = trampolineAt(finalCode, softModBegin);
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
#else
    UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
}
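
// For the opcodes below, regT0 doubles as a one-entry cache of the most
// recently computed result: m_lastResultBytecodeRegister remembers which
// virtual register currently lives in regT0, and killLastResultRegister()
// invalidates that mapping whenever a store makes it stale.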

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src)) {
        storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
        if (dst == m_lastResultBytecodeRegister)
            killLastResultRegister();
    } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
        // If either the src or dst is the cached register go through
        // get/put registers to make sure we track this correctly.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        // Perform the copy via regT1; do not disturb any mapping in regT0.
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
        storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(JSVALUE64)
        int32_t op2imm = getConstantOperandImmediateInt(op2);
#else
        int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
#endif
        addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThanOrEqual, regT0, regT1), target);
    }
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(baseVal, regT0);
    emitGetVirtualRegister(proto, regT1);

    // Check that baseVal & proto are cells.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(ImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(ImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;

    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();
    // Stores a naked int32 in the register file.
    store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}
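
// Global and scoped variable access reads straight out of a JSVariableObject's
// register storage: the variable object pointer is baked into the instruction
// stream as an immediate, and emitGet/PutVariableObjectRegister index its
// registers by the operand's offset, so no property lookup is needed.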

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
    move(ImmPtr(globalObject), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[3].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
    move(ImmPtr(globalObject), regT0);
    emitPutVariableObjectRegister(regT1, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    while (skip--)
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    while (skip--)
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);

    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
    emitPutVariableObjectRegister(regT0, regT1, currentInstruction[1].u.operand);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_ret_scopeChain).call();

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_construct_verify(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}
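
// op_resolve_global is an inline cache: the fast path compares the global
// object's Structure against the one recorded in the GlobalResolveInfo and,
// on a hit, loads the property straight out of the global's external storage
// at the cached offset. A miss falls through to cti_op_resolve_global, which
// performs the full lookup and refills the cache.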

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
{
    void* globalObject = currentInstruction[2].u.jsCell;
    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Check Structure of global object
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT0);
    load32(offsetAddr, regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}
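
// op_not works directly on the immediate boolean encoding: the first xor
// strips the boolean tag, the branchTest slow-cases unless only the payload
// bit could remain set, and the second xor simultaneously flips the payload
// bit and restores the tag.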

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, Imm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest8(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue(ptr)))), target);

    RECORD_JUMP_TARGET(target);
}
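
// op_jsr/op_sret implement subroutine calls for finally blocks: op_jsr plants
// a patchable code pointer in a virtual register (patched via m_jsrSites once
// the return label is known) and jumps; op_sret returns by jumping back
// through that stored pointer.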

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
    killLastResultRegister();
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    killLastResultRegister();
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitnot(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(JSVALUE64)
    not32(regT0);
    emitFastArithIntToImmNoCheck(regT0, regT0);
#else
    xorPtr(Imm32(~JSImmediate::TagTypeNumber), regT0);
#endif
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister()) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(Imm32(0), addressFor(i));
    store32(regT3, addressFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, Imm32(JSImmediate::FullTagTypeNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(addressFor(i), regT0);
    Jump end = branch32(Equal, regT0, addressFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

#if USE(JSVALUE64)
    loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);
#else
    loadPtr(BaseIndex(regT2, regT0, TimesFour), regT2);
#endif

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(Imm32(1), regT0);
    store32(regT0, addressFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(Imm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}
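
// compileOpStrictEq's fast path relies on the immediate encoding: once both
// operands are known to be non-numeric immediates (numbers and cells take the
// slow case), strict equality is just a register compare, since each such
// immediate has a single canonical bit pattern.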

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));
    addSlowCase(emitJumpIfImmediateNumber(regT2));

    if (type == OpStrictEq)
        set32(Equal, regT1, regT0, regT0);
    else
        set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
    RECORD_JUMP_TARGET(currentInstruction[2].u.operand);
}
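
// The three switch opcodes do not inline the table dispatch; they record the
// jump table in m_switches so branch offsets can be resolved to machine-code
// locations at link time, then call a cti stub that returns the computed
// target and jump through regT0.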

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[3].u.operand))));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(Equal, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest8(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(NotEqual, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_create_arguments(Instruction*)
{
    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    argsCreated.link(this);
}

void JIT::emit_op_init_arguments(Instruction*)
{
    storePtr(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
}
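
// op_convert_this only needs the slow path when the receiver is not a cell,
// or when its Structure carries the NeedsThisConversion flag (set for cells
// such as JSString that cannot be used as 'this' directly).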

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    addSlowCase(branchTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}
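
// Each emitSlow_ handler below re-links the slow-case jumps recorded by its
// fast path, in the exact order they were emitted (one linkSlowCase per
// addSlowCase), and then falls back to the matching cti stub.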

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(currentInstruction[2].u.operand, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    }
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

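// op_neq likewise reuses cti_op_eq: the stub leaves 0 or 1 in regT0, so
// flipping the low bit with xor32 turns "equal" into "not equal" before the
// result is tagged as a boolean immediate.
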
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(Imm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

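// The instanceof slow path links one slow case per fast-path check, in
// emission order: a cell check for each of value, baseVal and proto, plus
// the fast path's final test that baseVal's structure implements the default
// hasInstance behaviour.
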
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

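// The op_call-family slow cases advance m_callLinkInfoIndex with the same
// post-increment as the corresponding fast paths, so each slow path patches
// the CallLinkInfo record that its own fast path allocated.
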
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

#endif // !USE(JSVALUE32_64)

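// op_resolve_global_dynamic guards a cached global resolve against dynamic
// scope entries (e.g. variables injected by eval). The fast path walks `skip`
// scope chain nodes, structure-checking that each one is still a plain
// activation; any mismatch bails out to the slow case for that node.
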
void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
{
    int skip = currentInstruction[6].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    while (skip--) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
    }
    emit_op_resolve_global(currentInstruction, true);
}

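// The slow path must mirror the loop above: one slow case was registered per
// skipped scope chain node, and each falls back to a full cti_op_resolve.
// The final slow case handles a miss in the global resolve cache itself.
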
void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);
    int skip = currentInstruction[6].u.operand + m_codeBlock->needsFullScopeChain();
    while (skip--) {
        linkSlowCase(iter);
        JITStubCall resolveStubCall(this, cti_op_resolve);
        resolveStubCall.addArgument(ImmPtr(ident));
        resolveStubCall.call(dst);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));
    }
    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

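// softModulo() emits a software integer modulo for ARM cores without a
// hardware divide. The generated code (1) normalizes both operands to
// positive, recording their signs in regT1, (2) short-circuits power-of-two
// divisors with a bit mask, and (3) otherwise performs shift-aligned trial
// subtraction, entering an unrolled 31-step loop at the right offset via a
// computed jump. As a rough reference sketch of the value computed (a
// hypothetical helper for illustration, not part of the JIT; the emitted
// code derives the result's sign from the flags stored in regT1 rather than
// from C's sign-follows-dividend rule):
//
//     int softModuloReference(int dividend, int divisor)
//     {
//         int remainder = abs(dividend) % abs(divisor);
//         return dividend < 0 ? -remainder : remainder;
//     }
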
// For both JSValue32_64 and JSValue32
#if ENABLE(JIT_OPTIMIZE_MOD)
#if CPU(ARM_TRADITIONAL)
void JIT::softModulo()
{
    push(regS0);
    push(regS1);
    push(regT1);
    push(regT3);
#if USE(JSVALUE32_64)
    m_assembler.mov_r(regT3, regT2);
    m_assembler.mov_r(regT2, regT0);
#else
    // JSValue32: shift out the integer tag bit to recover the raw operands.
    m_assembler.mov_r(regT3, m_assembler.asr(regT2, 1));
    m_assembler.mov_r(regT2, m_assembler.asr(regT0, 1));
#endif
    m_assembler.mov_r(regT1, ARMAssembler::getOp2(0));

    // Normalize both operands to positive, recording their signs in regT1.
    m_assembler.teq_r(regT3, ARMAssembler::getOp2(0));
    m_assembler.rsb_r(regT3, regT3, ARMAssembler::getOp2(0), ARMAssembler::MI);
    m_assembler.eor_r(regT1, regT1, ARMAssembler::getOp2(1), ARMAssembler::MI);

    m_assembler.teq_r(regT2, ARMAssembler::getOp2(0));
    m_assembler.rsb_r(regT2, regT2, ARMAssembler::getOp2(0), ARMAssembler::MI);
    m_assembler.eor_r(regT1, regT1, ARMAssembler::getOp2(2), ARMAssembler::MI);

    // If the dividend is already smaller than the divisor, it is the remainder.
    Jump exitBranch = branch32(LessThan, regT2, regT3);

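    // Power-of-two fast case: if the divisor is a power of two, divisor - 1
    // is a mask of its low bits and the remainder is dividend & (divisor - 1).
    // The tst of (divisor - 1) against divisor detects this case.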
    m_assembler.sub_r(regS1, regT3, ARMAssembler::getOp2(1));
    m_assembler.tst_r(regS1, regT3);
    m_assembler.and_r(regT2, regT2, regS1, ARMAssembler::EQ);
    m_assembler.and_r(regT0, regS1, regT3);
    Jump exitBranch2 = branchTest32(Zero, regT0);

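    // General case: use clz to measure how far the divisor's leading bit sits
    // below the dividend's, then enter the unrolled trial-subtraction loop at
    // the matching step by adding a scaled offset to pc (each step below is
    // two 4-byte instructions).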
    m_assembler.clz_r(regS1, regT2);
    m_assembler.clz_r(regS0, regT3);
    m_assembler.sub_r(regS0, regS0, regS1);

    m_assembler.rsbs_r(regS0, regS0, ARMAssembler::getOp2(31));

    m_assembler.mov_r(regS0, m_assembler.lsl(regS0, 1), ARMAssembler::NE);

    m_assembler.add_r(ARMRegisters::pc, ARMRegisters::pc, m_assembler.lsl(regS0, 2), ARMAssembler::NE);
    m_assembler.mov_r(regT0, regT0); // nop; branch offsets are relative to pc, which reads two instructions ahead

    for (int i = 31; i > 0; --i) {
        m_assembler.cmp_r(regT2, m_assembler.lsl(regT3, i));
        m_assembler.sub_r(regT2, regT2, m_assembler.lsl(regT3, i), ARMAssembler::CS);
    }

    m_assembler.cmp_r(regT2, regT3);
    m_assembler.sub_r(regT2, regT2, regT3, ARMAssembler::CS);

    exitBranch.link(this);
    exitBranch2.link(this);

    // Restore the remainder's sign according to the flags recorded in regT1.
    m_assembler.teq_r(regT1, ARMAssembler::getOp2(0));
    m_assembler.rsb_r(regT2, regT2, ARMAssembler::getOp2(0), ARMAssembler::GT);

#if USE(JSVALUE32_64)
    m_assembler.mov_r(regT0, regT2);
#else
    // JSValue32: re-tag the result as an immediate integer.
    m_assembler.mov_r(regT0, m_assembler.lsl(regT2, 1));
    m_assembler.eor_r(regT0, regT0, ARMAssembler::getOp2(1));
#endif

    pop(regT3);
    pop(regT1);
    pop(regS1);
    pop(regS0);
    ret();
}
#else
#error "JIT_OPTIMIZE_MOD not yet supported on this platform."
#endif // CPU(ARM_TRADITIONAL)
#endif // ENABLE(JIT_OPTIMIZE_MOD)

} // namespace JSC

#endif // ENABLE(JIT)