/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"

#if ENABLE(JIT) && USE(JSVALUE32_64)

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {
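
// In the JSVALUE32_64 encoding a JSValue occupies two 32-bit words: a tag and
// a payload. Throughout this file values travel in register pairs, tag in
// regT1 and payload in regT0 (regT3/regT2 for a second operand). Cells carry
// JSValue::CellTag, int32s JSValue::Int32Tag, and doubles occupy the tag space
// below JSValue::LowestTag.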

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure *trampolines)
{
#if ENABLE(JIT_OPTIMIZE_MOD)
    Label softModBegin = align();
    softModulo();
#endif

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag

    Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);

    Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
    move(regT2, regT0);
    move(Imm32(JSValue::Int32Tag), regT1);

    ret();
#endif
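
    // The three string_failureCases jumps above are deliberately left unlinked
    // here; they are turned into tail calls to cti_op_get_by_id_string_fail
    // when the trampolines are linked at the bottom of this function, so any
    // non-string receiver (or a length too large for an int32 immediate)
    // re-enters the generic get_by_id path.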

    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.

#if ENABLE(JIT_OPTIMIZE_CALL)
    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction2 = call();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    hasCodeBlock2.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck2 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    arityCheckOkay2.link(this);

    isNativeFunc2.link(this);

    compileOpCallInitializeCallFrame();

    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);
#endif // ENABLE(JIT_OPTIMIZE_CALL)
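
    // On the first call through VirtualCallLink, the stub (linked below to
    // cti_vm_lazyLinkCall) compiles the callee if necessary and patches the
    // caller's call site, so subsequent calls jump straight to the code
    // address left in regT0 rather than re-entering this trampoline.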

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction1 = call();
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck1 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    arityCheckOkay3.link(this);

    isNativeFunc3.link(this);

    compileOpCallInitializeCallFrame();
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
    jump(regT0);

#if CPU(X86) || CPU(ARM_TRADITIONAL)
    Label nativeCallThunk = align();
    preserveReturnAddressAfterCall(regT0);
    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address

    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

#if CPU(X86)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    /* We have two structs that we use to describe the stackframe we set up for our
     * call to native code. NativeCallFrameStructure describes how we set up the stack
     * in advance of the call. NativeFunctionCalleeSignature describes the callframe
     * as the native code expects it. We do this as we are using the fastcall calling
     * convention which results in the callee popping its arguments off the stack, but
     * not the rest of the callframe so we need a nice way to ensure we increment the
     * stack pointer by the right amount after the call.
     */

#if COMPILER(MSVC) || OS(LINUX)
#if COMPILER(MSVC)
#pragma pack(push)
#pragma pack(4)
#endif // COMPILER(MSVC)
    struct NativeCallFrameStructure {
        //  CallFrame* callFrame; // passed in EDX
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
        JSValue result;
    };
    struct NativeFunctionCalleeSignature {
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
    };
#if COMPILER(MSVC)
#pragma pack(pop)
#endif // COMPILER(MSVC)
#else
    struct NativeCallFrameStructure {
        //  CallFrame* callFrame; // passed in ECX
        //  JSObject* callee; // passed in EDX
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
    };
    struct NativeFunctionCalleeSignature {
        JSValue thisValue;
        ArgList* argPointer;
    };
#endif
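
    // The two struct variants differ because on MSVC/Linux the non-POD JSValue
    // result is returned through a hidden pointer argument, so the result (and
    // the callee slot) must live in the frame we build. NativeFunctionCalleeSignature
    // is the prefix the fastcall callee pops, which lets us compute the stack
    // adjustment needed after the call below.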

    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
    // Allocate system stack frame
    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));

    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT3);
    storePtr(regT2, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    storePtr(regT3, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

#if COMPILER(MSVC) || OS(LINUX)
    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);

    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
    storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));

    // Plant callframe
    move(callFrameRegister, X86Registers::edx);

    call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));

    // JSValue is a non-POD type, so eax points to it
    emitLoad(0, regT1, regT0, X86Registers::eax);
#else
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx); // callee
    move(callFrameRegister, X86Registers::ecx); // callFrame
    call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
#endif

    // We've put a few temporaries on the stack in addition to the actual arguments
    // so pull them off now
    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);

#elif CPU(ARM_TRADITIONAL)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    // Allocate stack space for our arglist
    COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0 && sizeof(JSValue) == 8 && sizeof(Register) == 8, ArgList_should_be_8byte_aligned);
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    move(callFrameRegister, regT1);
    sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);

    // push pointer to arguments
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // Argument passing method:
    // r0 - points to return value
    // r1 - callFrame
    // r2 - callee
    // stack: this(JSValue) and a pointer to ArgList

#if OS(WINCE)
    // Setup arg4:
    push(stackPointerRegister);

    // Setup arg3:
    // regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
    load32(Address(regT1, -(int32_t)sizeof(void*) * 2), ARMRegisters::r3);
    push(ARMRegisters::r3);
    load32(Address(regT1, -(int32_t)sizeof(void*)), regT3);
    storePtr(regT3, Address(stackPointerRegister));

    // Setup arg2:
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);

    // Setup arg1:
    move(callFrameRegister, regT1);

    // Setup arg0:
    move(stackPointerRegister, regT0);

    call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));

    load32(Address(stackPointerRegister, 0), regT0);
    load32(Address(stackPointerRegister, 4), regT1);

    addPtr(Imm32(sizeof(ArgList) + 8), stackPointerRegister);
#else // OS(WINCE)
    move(stackPointerRegister, regT3);
    subPtr(Imm32(8), stackPointerRegister);
    move(stackPointerRegister, regT0);
    subPtr(Imm32(8 + 4 + 4 /* padding */), stackPointerRegister);

    // Setup arg4:
    storePtr(regT3, Address(stackPointerRegister, 8));

    // Setup arg3:
    // regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
    load32(Address(regT1, -(int32_t)sizeof(void*) * 2), regT3);
    storePtr(regT3, Address(stackPointerRegister, 0));
    load32(Address(regT1, -(int32_t)sizeof(void*)), regT3);
    storePtr(regT3, Address(stackPointerRegister, 4));

    // Setup arg2:
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);

    // Setup arg1:
    move(callFrameRegister, regT1);

    call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));

    // Load return value
    load32(Address(stackPointerRegister, 16), regT0);
    load32(Address(stackPointerRegister, 20), regT1);

    addPtr(Imm32(sizeof(ArgList) + 16 + 8), stackPointerRegister);
#endif // OS(WINCE)

    // Check for an exception
    move(ImmPtr(&globalData->exception), regT2);
    Jump sawException = branch32(NotEqual, tagFor(0, regT2), Imm32(JSValue::EmptyValueTag));

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT3);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // Handle an exception
    sawException.link(this);
    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    restoreReturnAddressBeforeReturn(regT2);
    ret();
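
    // Rather than returning to the JIT caller, the exception path records the
    // faulting return PC in globalData->exceptionLocation and overwrites the
    // return address with ctiVMThrowTrampoline, so the ret() above re-enters
    // the VM's throw machinery instead of the interrupted JIT code.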

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    UNUSED_PARAM(globalData);
    breakpoint();
#endif

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()), 0);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
#endif

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
#if ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
    trampolines->ctiNativeCallThunk = adoptRef(new NativeExecutable(JITCode(JITCode::HostFunction(patchBuffer.trampolineAt(nativeCallThunk)))));
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
#else
    UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
#else
    UNUSED_PARAM(ctiVirtualCallLink);
#endif
#if ENABLE(JIT_OPTIMIZE_MOD)
    trampolines->ctiSoftModulo = patchBuffer.trampolineAt(softModBegin);
#endif
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}
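
// Fast paths register their bail-outs with addSlowCase(); the matching
// emitSlow_* function must consume them with linkSlowCase(iter) in exactly
// the order they were added (hence the conditional link above, mirroring the
// conditional addSlowCase in the fast path) before falling back to a stub call.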

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that value, baseVal, and proto are cells.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(baseVal);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(Imm32(JSValue::TrueTag), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(Imm32(JSValue::FalseTag), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[2].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[3].u.operand;

    loadPtr(&globalObject->d()->registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[1].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    loadPtr(&globalObject->d()->registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Verify structure.
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))));

    // Load property.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
    load32(offsetAddr, regT3);
    load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
}
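
// The fast path above only succeeds while the global object's Structure matches
// the one cached in this GlobalResolveInfo entry. The slow path below does a
// full lookup via cti_op_resolve_global, which also refreshes the cached
// structure and offset for the entry identified by currentIndex, so later
// executions can take the fast path again.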

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    xor32(Imm32(JSValue::FalseTag), regT0);
    addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
    xor32(Imm32(JSValue::TrueTag), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}
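
// The double-xor above leans on the tag layout: TrueTag and FalseTag differ
// only in the low bit, so tag ^ FalseTag is 0 or 1 exactly for booleans (the
// ~1 mask test rejects everything else), and xoring that bit with TrueTag
// rebuilds the tag of the negated boolean without any branching.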

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}
* currentInstruction
)
782 unsigned cond
= currentInstruction
[1].u
.operand
;
783 unsigned target
= currentInstruction
[2].u
.operand
;
785 emitLoad(cond
, regT1
, regT0
);
787 Jump isTrue
= branch32(Equal
, regT1
, Imm32(JSValue::TrueTag
));
788 addJump(branch32(Equal
, regT1
, Imm32(JSValue::FalseTag
)), target
);
790 Jump isNotInteger
= branch32(NotEqual
, regT1
, Imm32(JSValue::Int32Tag
));
791 Jump isTrue2
= branch32(NotEqual
, regT0
, Imm32(0));
792 addJump(jump(), target
);
794 if (supportsFloatingPoint()) {
795 isNotInteger
.link(this);
797 addSlowCase(branch32(Above
, regT1
, Imm32(JSValue::LowestTag
)));
799 emitLoadDouble(cond
, fpRegT0
);
800 addJump(branchDoubleZeroOrNaN(fpRegT0
, fpRegT1
), target
);
802 addSlowCase(isNotInteger
);

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        emitLoadDouble(cond, fpRegT0);
        addJump(branchDoubleNonZero(fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isFalse.link(this);
    isFalse2.link(this);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(NonZero, regT1), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest8(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(Zero, regT1), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(Equal, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
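
// set8 leaves 0 or 1 in regT0; or-ing in FalseTag turns that directly into
// FalseTag or TrueTag (TrueTag == FalseTag | 1 in this encoding), so the
// boolean result can be stored without a branch. The equality emitters below
// all finish with the same idiom.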

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(NotEqual, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(Imm32(0x1), regT0);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));

    if (type == OpStrictEq)
        set8(Equal, regT0, regT1, regT0);
    else
        set8(NotEqual, regT0, regT1, regT0);

    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
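
// The and32 of the two tags is a compact filter: every double has a tag below
// LowestTag, so if either tag is a double tag the AND also drops below
// LowestTag; and since CellTag and Int32Tag are the two highest tags, the AND
// lands at or above CellTag only when both operands are cells and/or Int32s.
// Any pair that survives both branches is decided by the tag comparison alone,
// because each surviving value (true, false, null, undefined) is fully
// determined by its tag.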

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
    set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister()) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(Imm32(0), addressFor(i));
    store32(regT3, addressFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, Imm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, Imm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(addressFor(i), regT0);
    Jump end = branch32(Equal, regT0, addressFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(Imm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(Imm32(1), regT0);
    store32(regT0, addressFor(i));

    // Verify that i is valid:
    loadPtr(addressFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), Imm32(JSValue::NullTag)));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(Imm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}
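
// for-in iteration snapshots the property names into the JSPropertyNameIterator
// when op_get_pnames runs; op_next_pname can then hand keys out cheaply as long
// as the base object's Structure and prototype chain still match the cached
// ones. Any mismatch falls back to cti_has_property, so deleted or shadowed
// properties are re-validated before being surfaced to the loop body.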

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::EmptyValueTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;

    // This opcode only executes after a return from cti_op_throw.

    // cti_op_throw may have taken us to a call frame further up the stack; reload
    // the call frame pointer to adjust.
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Now store the exception returned by cti_op_throw.
    emitStore(exception, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned type = currentInstruction[2].u.operand;
    unsigned message = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(type));
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(dst);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    emit_op_enter(currentInstruction);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_create_arguments(Instruction*)
{
    Jump argsCreated = branch32(NotEqual, tagFor(RegisterFile::ArgumentsRegister, callFrameRegister), Imm32(JSValue::EmptyValueTag));

    // If we get here the arguments pointer is a null cell - i.e. arguments need lazy creation.
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    argsCreated.link(this);
}

void JIT::emit_op_init_arguments(Instruction*)
{
    emitStore(RegisterFile::ArgumentsRegister, JSValue(), callFrameRegister);
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));

    map(m_bytecodeIndex + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

} // namespace JSC

#endif // ENABLE(JIT) && USE(JSVALUE32_64)