/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"
#if ENABLE(JIT) && USE(JSVALUE32_64)

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure *trampolines)
{
#if ENABLE(JIT_OPTIMIZE_MOD)
    Label softModBegin = align();
    softModulo();
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag
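
    // Note: throughout this file the JSVALUE32_64 encoding is assumed: a JSValue
    // is a 64-bit tag/payload pair (see u.asBits below), kept in a register pair
    // such as regT1:regT0. Cells, int32s, booleans, null and undefined each get
    // a distinct tag, and any tag below JSValue::LowestTag is the high word of a
    // double, which is why fast paths test the tag register first.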

    Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);

    Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
    move(regT2, regT0);
    move(Imm32(JSValue::Int32Tag), regT1);

    ret();
#endif

    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.

#if ENABLE(JIT_OPTIMIZE_CALL)
    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction2 = call();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    hasCodeBlock2.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck2 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    arityCheckOkay2.link(this);

    isNativeFunc2.link(this);

    compileOpCallInitializeCallFrame();

    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);
#endif // ENABLE(JIT_OPTIMIZE_CALL)

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction1 = call();
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck1 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    arityCheckOkay3.link(this);

    isNativeFunc3.link(this);

    compileOpCallInitializeCallFrame();
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
    jump(regT0);

#if CPU(X86) || CPU(ARM_TRADITIONAL)
    Label nativeCallThunk = align();
    preserveReturnAddressAfterCall(regT0);
    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address

    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

#if CPU(X86)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    /* We have two structs that we use to describe the stackframe we set up for our
     * call to native code. NativeCallFrameStructure describes how we set up the stack
     * in advance of the call. NativeFunctionCalleeSignature describes the callframe
     * as the native code expects it. We do this as we are using the fastcall calling
     * convention which results in the callee popping its arguments off the stack, but
     * not the rest of the callframe so we need a nice way to ensure we increment the
     * stack pointer by the right amount after the call.
     */

#if COMPILER(MSVC) || OS(LINUX)
#if COMPILER(MSVC)
#pragma pack(push)
#pragma pack(4)
#endif // COMPILER(MSVC)
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in EDX
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
        JSValue result;
    };
    struct NativeFunctionCalleeSignature {
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
    };
#if COMPILER(MSVC)
#pragma pack(pop)
#endif // COMPILER(MSVC)
#else
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in ECX
      //  JSObject* callee; // passed in EDX
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
    };
    struct NativeFunctionCalleeSignature {
        JSValue thisValue;
        ArgList* argPointer;
    };
#endif
    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
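
    // Note: rounding with "(size + 15) & ~15" pads the frame to a multiple of
    // 16 bytes, keeping the system stack pointer 16-byte aligned across the
    // call, which the native code being called is assumed to require.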

    // Allocate system stack frame
    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);

    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));

    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT3);
    storePtr(regT2, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    storePtr(regT3, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

#if COMPILER(MSVC) || OS(LINUX)
    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);

    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
    storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));

    // Plant callframe
    move(callFrameRegister, X86Registers::edx);

    call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));

    // JSValue is a non-POD type, so eax points to it
    emitLoad(0, regT1, regT0, X86Registers::eax);
#else
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx); // callee
    move(callFrameRegister, X86Registers::ecx); // callFrame
    call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
#endif
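
    // Note: in the non-MSVC branch above the native call's 64-bit JSValue
    // result comes back in edx:eax; assuming the usual x86 register assignments
    // (regT0 == eax, regT1 == edx), the payload and tag are therefore already
    // in the expected register pair.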

    // We've put a few temporaries on the stack in addition to the actual arguments
    // so pull them off now
    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);

#elif CPU(ARM_TRADITIONAL)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    // Allocate stack space for our arglist
    COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0 && sizeof(JSValue) == 8 && sizeof(Register) == 8, ArgList_should_be_8byte_aligned);
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);

    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    move(callFrameRegister, regT1);
    sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);

    // push pointer to arguments
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // Argument passing method:
    // r0 - points to return value
    // r1 - callFrame
    // r2 - callee
    // stack: this(JSValue) and a pointer to ArgList

#if OS(WINCE)
    // Setup arg4:
    push(stackPointerRegister);

    // Setup arg3
    // regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
    load32(Address(regT1, -(int32_t)sizeof(void*) * 2), ARMRegisters::r3);
    push(ARMRegisters::r3);
    load32(Address(regT1, -(int32_t)sizeof(void*)), regT3);
    storePtr(regT3, Address(stackPointerRegister));

    // Setup arg2:
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);

    // Setup arg1:
    move(callFrameRegister, regT1);

    // Setup arg0:
    move(stackPointerRegister, regT0);

    call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));

    load32(Address(stackPointerRegister, 0), regT0);
    load32(Address(stackPointerRegister, 4), regT1);

    addPtr(Imm32(sizeof(ArgList) + 8), stackPointerRegister);
#else // OS(WINCE)
    move(stackPointerRegister, regT3);
    subPtr(Imm32(8), stackPointerRegister);
    move(stackPointerRegister, regT0);
    subPtr(Imm32(8 + 4 + 4 /* padding */), stackPointerRegister);

    // Setup arg4:
    storePtr(regT3, Address(stackPointerRegister, 8));

    // Setup arg3
    // regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
    load32(Address(regT1, -(int32_t)sizeof(void*) * 2), regT3);
    storePtr(regT3, Address(stackPointerRegister, 0));
    load32(Address(regT1, -(int32_t)sizeof(void*)), regT3);
    storePtr(regT3, Address(stackPointerRegister, 4));

    // Setup arg2:
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);

    // Setup arg1:
    move(callFrameRegister, regT1);

    call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));

    // Load return value
    load32(Address(stackPointerRegister, 16), regT0);
    load32(Address(stackPointerRegister, 20), regT1);

    addPtr(Imm32(sizeof(ArgList) + 16 + 8), stackPointerRegister);
#endif // OS(WINCE)

#endif

    // Check for an exception
    move(ImmPtr(&globalData->exception), regT2);
    Jump sawException = branch32(NotEqual, tagFor(0, regT2), Imm32(JSValue::EmptyValueTag));

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT3);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // Handle an exception
    sawException.link(this);
    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    restoreReturnAddressBeforeReturn(regT2);
    ret();
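
    // Note: on the exception path the thunk does not return to its JIT caller;
    // it records the return address in globalData->exceptionLocation and then
    // "returns" to ctiVMThrowTrampoline, which is expected to continue in the
    // usual throw machinery.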

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    UNUSED_PARAM(globalData);
#endif

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
#endif

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
#if ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
    trampolines->ctiNativeCallThunk = adoptRef(new NativeExecutable(JITCode(JITCode::HostFunction(trampolineAt(finalCode, nativeCallThunk)))));
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
#else
    UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    trampolines->ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
#else
    UNUSED_PARAM(ctiVirtualCallLink);
#endif
#if ENABLE(JIT_OPTIMIZE_MOD)
    trampolines->ctiSoftModulo = trampolineAt(finalCode, softModBegin);
#endif
}
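
// A note on map(): the opcode emitters below call map() after storing a result
// to record that, on entry to the next bytecode, the given virtual register is
// still live in the named tag/payload registers; the next opcode's fast path
// can then skip reloading it. Paths that load nothing (such as constant stores)
// deliberately do not map.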

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that value, baseVal, and proto are cells.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(baseVal);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(Imm32(JSValue::TrueTag), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(Imm32(JSValue::FalseTag), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[2].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[3].u.operand;

    loadPtr(&globalObject->d()->registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[1].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    loadPtr(&globalObject->d()->registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}
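
// Note: the while (skip--) walk above (and its twin in emit_op_put_scoped_var
// below) runs at compile time, emitting one ScopeChainNode::next hop per
// skipped scope; the generated code is straight-line loads with no runtime loop.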

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Check Structure of global object
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))));

    // Load cached property
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
    load32(offsetAddr, regT3);
    load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
}
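
// Note: the fast path above behaves like a one-entry inline cache per resolve
// site: the GlobalResolveInfo slot's cached Structure* and offset are filled
// in by the slow path, so a hit costs one structure compare plus an indexed
// load from the global object's external storage.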

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    xor32(Imm32(JSValue::FalseTag), regT0);
    addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
    xor32(Imm32(JSValue::TrueTag), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}
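
// Note: the xor trick above relies on TrueTag and FalseTag differing only in
// the low bit. xor-ing the tag with FalseTag maps false/true to 0/1 (any other
// tag trips the ~1 test and takes the slow case), and xor-ing with TrueTag
// then maps 0/1 to TrueTag/FalseTag, i.e. the logical negation of the input.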

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isTrue = branch32(Equal, regT1, Imm32(JSValue::TrueTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::FalseTag)), target);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isTrue2 = branch32(NotEqual, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleEqualOrUnordered, fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isTrue.link(this);
    isTrue2.link(this);
}
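
// Note: the double path above compares the condition against a zeroed fpRegT0
// using DoubleEqualOrUnordered, so both 0.0 and NaN take the jfalse branch;
// this matches ToBoolean (NaN is falsy) without needing a separate NaN check.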

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleNotEqual, fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isFalse.link(this);
    isFalse2.link(this);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(NonZero, regT1), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest8(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(Zero, regT1), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(Equal, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
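
// Note: set8() above leaves 0 or 1 in regT0; or-ing in FalseTag converts that
// directly to FalseTag or TrueTag (the two tags differ only in the low bit),
// materializing the boolean result without a branch.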

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(NotEqual, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(Imm32(0x1), regT0);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));

    if (type == OpStrictEq)
        set8(Equal, regT0, regT1, regT0);
    else
        set8(NotEqual, regT0, regT1, regT0);

    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
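
// Note: the and32 of the two tags above is a cheap filter: every non-double
// tag is >= LowestTag, so an AND below LowestTag means a double is involved,
// while an AND >= CellTag means both operands are cells and/or Int32s. In all
// remaining tag combinations strict equality is exactly tag equality, which
// set8() then materializes.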

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
    set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister()) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(Imm32(0), addressFor(i));
    store32(regT3, addressFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, Imm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, Imm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(addressFor(i), regT0);
    Jump end = branch32(Equal, regT0, addressFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(Imm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(Imm32(1), regT0);
    store32(regT0, addressFor(i));

    // Verify that i is valid:
    loadPtr(addressFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), Imm32(JSValue::NullTag)));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(Imm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
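
    // Note: regT3 is stepping through the iterator's cached StructureChain in
    // lock-step with the actual prototype chain; any structure mismatch, or a
    // prototype appearing before the cached chain ends, falls back to
    // cti_has_property below rather than trusting the cached property names.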

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::EmptyValueTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;

    // This opcode only executes after a return from cti_op_throw.

    // cti_op_throw may have taken us to a call frame further up the stack; reload
    // the call frame pointer to adjust.
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Now store the exception returned by cti_op_throw.
    emitStore(exception, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned type = currentInstruction[2].u.operand;
    unsigned message = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(type));
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(dst);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    emit_op_enter(currentInstruction);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_create_arguments(Instruction*)
{
    Jump argsCreated = branch32(NotEqual, tagFor(RegisterFile::ArgumentsRegister, callFrameRegister), Imm32(JSValue::EmptyValueTag));

    // If we get here the arguments pointer is a null cell - i.e. arguments need lazy creation.
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    argsCreated.link(this);
}

void JIT::emit_op_init_arguments(Instruction*)
{
    emitStore(RegisterFile::ArgumentsRegister, JSValue(), callFrameRegister);
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));

    map(m_bytecodeIndex + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

} // namespace JSC

#endif // ENABLE(JIT) && USE(JSVALUE32_64)