/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"
void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure *trampolines)
{
#if ENABLE(JIT_USE_SOFT_MODULO)
    Label softModBegin = align();
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag

    Jump string_failureCases1 = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);

    Jump string_failureCases3 = branch32(Above, regT2, TrustedImm32(INT_MAX));

    move(TrustedImm32(JSValue::Int32Tag), regT1);
#endif
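
    // Note: the three string_failureCases jumps above are turned into tail
    // calls to cti_op_get_by_id_string_fail below (see the patchBuffer.link()
    // calls), so a wrong tag, a non-JSString vptr, or a length above INT_MAX
    // all fall back to the generic get_by_id slow path.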
    JumpList callLinkFailures;
    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
#if ENABLE(JIT_OPTIMIZE_CALL)
    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);

    // VirtualConstructLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualConstructLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);

#endif // ENABLE(JIT_OPTIMIZE_CALL)
    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    // VirtualConstruct Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock4.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    // If the parser fails we want to be able to keep going,
    // so we handle this as a parse failure.
    callLinkFailures.link(this);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT1);
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif
    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, m_globalData->executableAllocator);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
#endif
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));
    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;
    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
#if ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
#endif
#if ENABLE(JIT_USE_SOFT_MODULO)
    trampolines->ctiSoftModulo = patchBuffer.trampolineAt(softModBegin);
#endif
}
JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
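
    // The same thunk shape serves both calls and constructs; the only
    // difference is which NativeExecutable slot (m_function or m_constructor)
    // the indirect call through executableOffsetToFunction lands on.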
#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT1);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT1, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);
#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    call(Address(regT2, executableOffsetToFunction), regT0);
    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    move(callFrameRegister, MIPSRegisters::a0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);
#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    UNUSED_PARAM(executableOffsetToFunction);
#endif
    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    return nativeCallThunk;
}
JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool> executablePool, JSGlobalData* globalData, NativeFunction func)
{
    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);
#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    move(callFrameRegister, MIPSRegisters::a0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    restoreReturnAddressBeforeReturn(regT3);
#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#endif
    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);
    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, executablePool);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    patchBuffer.finalizeCode();

    return patchBuffer.trampolineAt(nativeCallThunk);
}
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}
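
// Throughout the opcode emitters below, values are handled as (tag, payload)
// pairs: in the JSVALUE32_64 encoding a JSValue is two 32-bit words, and by
// convention emitLoad/emitStore place the tag in regT1 and the payload in
// regT0 (see the "regT0 holds payload, regT1 holds tag" note above).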
void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
}
void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}
void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target);
}
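
// Note the flipped condition in the first constant case above: when op1 is the
// constant, the test is emitted as "op2 >= op1" rather than "op1 <= op2", so
// that the constant can sit on the immediate side of branch32.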
void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}
void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(branch8(NotEqual, Address(regT3, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));

    // FIXME: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}
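
// The instanceof loop compares payloads only: for cells the payload is the
// pointer itself, and a null prototype is encoded with a zero payload, so
// branchTest32(NonZero, regT2) doubles as the end-of-chain test.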
void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal);
    stubCall.call();
}
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}
void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;

    loadPtr(&globalObject->m_registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}
void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);

    loadPtr(&globalObject->m_registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}
void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}
void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    Jump argumentsNotCreated = branch32(Equal, tagFor(arguments), TrustedImm32(JSValue::EmptyValueTag));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.addArgument(unmodifiedArgumentsRegister(currentInstruction[2].u.operand));
    stubCall.call();
    argumentsNotCreated.link(this);
}
void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(dst)), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst));
    stubCall.call();
    argsNotCreated.link(this);
}
void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}
void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}
void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = m_codeBlock->globalObject();

    unsigned currentIndex = m_globalResolveInfoIndex++;
    GlobalResolveInfo* resolveInfoAddress = &m_codeBlock->globalResolveInfo(currentIndex);

    move(TrustedImmPtr(globalObject), regT0);
    move(TrustedImmPtr(resolveInfoAddress), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset())));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT2);
    load32(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT3);
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
}
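
// The BaseIndex loads above scale by TimesEight because the property storage
// holds full JSValues - eight bytes each in this encoding - and the tag and
// payload halves are fetched with two separate 32-bit loads.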
void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}
void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}
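
// The ASSERT in emit_op_jfalse/jtrue documents the trick: BooleanTag and
// Int32Tag are the two highest tag values (Int32Tag + 1 wraps to zero), so a
// single unsigned "Below BooleanTag" check routes everything that is neither
// boolean nor int32 to the slow case, after which testing the payload against
// zero is correct for both remaining types.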
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, Imm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, Imm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}
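
// The ASSERT above justifies the or32: UndefinedTag and NullTag are adjacent,
// and NullTag has its low bit set, so OR-ing 1 into the tag folds undefined
// into null and a single compare against NullTag catches both.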
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(ptr)), target);
}
void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}
void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));

    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    emitStoreBool(dst, regT0);
}
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));

    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), regT0);
    emitStoreBool(dst, regT0);
}
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, TrustedImm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, TrustedImm32(JSValue::CellTag)));

    if (type == OpStrictEq)
        compare32(Equal, regT0, regT1, regT0);
    else
        compare32(NotEqual, regT0, regT1, regT0);

    emitStoreBool(dst, regT0);
}
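
// Why the AND trick in compileOpStrictEq works (a sketch, assuming the
// JSVALUE32_64 tag layout where immediate tags run from Int32Tag = -1 down to
// LowestTag and any double's tag word is below LowestTag): x & y is never
// greater than min(x, y), so every pair involving a double ANDs to a value
// below LowestTag, and the two branches together conservatively send doubles,
// cell pairs, int32 pairs, and boolean pairs - anything whose payloads would
// matter - to the slow case. What survives to the compare32 is decided by the
// tags alone (for example null vs. undefined).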
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    emitStoreBool(dst, regT1);
}
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(Zero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    emitStoreBool(dst, regT1);
}
void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}
void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
}
void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(TrustedImm32(Int32Tag), intTagFor(i));
    store32(TrustedImm32(0), intPayloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, payloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}
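
// i and size are kept as raw machine ints in their registers (hence the
// explicit Int32Tag stores through intTagFor), so op_next_pname can bump the
// loop counter without re-boxing a JSValue on every iteration.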
void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    loadPtr(payloadFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    loadPtr(payloadFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
    loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    end.link(this);
}
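
// The fast path above only trusts the cached property names while nothing has
// changed shape: the base's Structure must still equal m_cachedStructure, and
// each Structure in m_cachedPrototypeChain is re-checked against the live
// prototype chain up to its null terminator; any mismatch falls back to
// cti_has_property.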
void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}
void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    isInt32.link(this);

    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}
void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}
void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // cti_op_throw returns the callFrame for the handler.
    move(regT0, callFrameRegister);

    // Now store the exception returned by cti_op_throw.
    loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, globalData)), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}
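
// After copying the exception into regT1:regT0, the two store32s above
// overwrite globalData->exception with the empty value's tag and payload,
// marking the exception as consumed before the handler runs.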
void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for this switch's destinations and track the switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
    unsigned message = currentInstruction[1].u.operand;

    JITStubCall stubCall(this, cti_op_throw_reference_error);
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;

    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall(this, cti_op_push_activation).call(activation);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));

    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    emitStore(dst, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);

    argsCreated.link(this);
}
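
// The arguments object is created lazily: the register's tag stays
// EmptyValueTag until the object is first needed, which is what the branch
// above tests. The result is stored both to the visible register and to its
// paired unmodifiedArgumentsRegister slot, so a later reassignment of
// 'arguments' does not lose track of the original object.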

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitStore(dst, JSValue());
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    unsigned protoRegister = currentInstruction[2].u.operand;
    emitLoad(protoRegister, regT1, regT0);
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));

    map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}

void JIT::emit_op_convert_this_strict(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    Jump notNull = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
    emitStore(thisRegister, jsNull());
    Jump setThis = jump();
    notNull.link(this);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump notAnObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
    addSlowCase(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
    isImmediate.link(this);
    notAnObject.link(this);
    setThis.link(this);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this_strict), thisRegister, regT1, regT0);
}
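
// Strict-mode 'this' conversion never wraps primitives: an empty (missing)
// receiver becomes null, while immediates and ordinary objects flow through
// unchanged; only cells whose Structure sets NeedsThisConversion take the
// slow path.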

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}

void JIT::emitSlow_op_convert_this_strict(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_convert_this_strict);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}
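
// enabledProfilerReference in the JIT stack frame points at the global
// profiler slot; testing the pointed-to pointer lets compiled code skip the
// profiling stubs entirely while no profiler is attached.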

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitStoreInt32(dst, regT0);
}
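
// The ArgumentCount slot in the call frame header counts the implicit
// 'this', hence the sub32 above. The fast path applies only while the
// arguments object is still lazily uncreated (tag == EmptyValueTag); once
// it exists, the generic property lookup below must be used instead.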

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.call(dst);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including 'this'.
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

    Jump skipOutofLineParams;
    int numArgs = m_codeBlock->m_numParameters;
    if (numArgs) {
        Jump notInInPlaceArgs = branch32(AboveOrEqual, regT2, Imm32(numArgs));
        addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
        loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
        skipOutofLineParams = jump();
        notInInPlaceArgs.link(this);
    }

    addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
    mul32(TrustedImm32(sizeof(Register)), regT3, regT3);
    subPtr(regT3, regT1);
    loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    if (numArgs)
        skipOutofLineParams.link(this);
    emitStore(dst, regT1, regT0);
}
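
// Layout note: arguments live at negative offsets below the call frame
// header. Declared parameters sit a fixed -(CallFrameHeaderSize + numArgs) *
// sizeof(Register) below the frame pointer; when the caller passed more
// arguments than were declared, the full set lives a further ArgumentCount *
// sizeof(Register) down, which is what the subPtr path computes.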

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    emitStore(arguments, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments);
    stubCall.addArgument(property);
    stubCall.call(dst);
}
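
// The linkSlowCase calls pair one-to-one, in emission order, with the
// addSlowCase branches in the fast path: the first (arguments object already
// created) can go straight to the generic get_by_val, while the
// index-not-int32 and index-out-of-range cases must materialize the
// arguments object first.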

#if ENABLE(JIT_USE_SOFT_MODULO)
void JIT::softModulo()
{
    push(regT1);
    push(regT3);
    move(regT2, regT3);
    move(regT0, regT2);
    move(TrustedImm32(0), regT1);

    // Make both operands positive, tracking their original signs in regT1
    // so a negative remainder can be produced at the end.
    Jump positiveRegT3 = branch32(GreaterThanOrEqual, regT3, TrustedImm32(0));
    neg32(regT3);
    xor32(TrustedImm32(1), regT1);
    positiveRegT3.link(this);

    Jump positiveRegT2 = branch32(GreaterThanOrEqual, regT2, TrustedImm32(0));
    neg32(regT2);
    xor32(TrustedImm32(2), regT1);
    positiveRegT2.link(this);

    // Save the sign flags for the negative-remainder fixup below.
    push(regT1);

    Jump exitBranch = branch32(LessThan, regT2, regT3);

    // Power of two fast case
    move(regT3, regT0);
    sub32(TrustedImm32(1), regT0);
    Jump powerOfTwo = branchTest32(NonZero, regT0, regT3);
    and32(regT0, regT2);
    powerOfTwo.link(this);

    and32(regT3, regT0);

    Jump exitBranch2 = branchTest32(Zero, regT0);

    countLeadingZeros32(regT2, regT0);
    countLeadingZeros32(regT3, regT1);
    sub32(regT0, regT1);

    Jump useFullTable = branch32(Equal, regT1, TrustedImm32(31));

    neg32(regT1);
    add32(TrustedImm32(31), regT1);

    int elementSizeByShift = -1;
#if CPU(ARM)
    elementSizeByShift = 3;
#else
#error "JIT_OPTIMIZE_MOD not yet supported on this platform."
#endif
    relativeTableJump(regT1, elementSizeByShift);

    useFullTable.link(this);
    // Modulo table
    for (int i = 31; i > 0; --i) {
#if CPU(ARM_TRADITIONAL)
        m_assembler.cmp_r(regT2, m_assembler.lsl(regT3, i));
        m_assembler.sub_r(regT2, regT2, m_assembler.lsl(regT3, i), ARMAssembler::CS);
#elif CPU(ARM_THUMB2)
        ShiftTypeAndAmount shift(SRType_LSL, i);
        m_assembler.sub_S(regT1, regT2, regT3, shift);
        m_assembler.it(ARMv7Assembler::ConditionCS);
        m_assembler.mov(regT2, regT1);
#else
#error "JIT_OPTIMIZE_MOD not yet supported on this platform."
#endif
    }

    Jump lower = branch32(Below, regT2, regT3);
    sub32(regT3, regT2);
    lower.link(this);

    exitBranch.link(this);
    exitBranch2.link(this);

    // Negate the remainder if the saved sign flags require it.
    pop(regT1);
    Jump positiveResult = branch32(Equal, regT1, TrustedImm32(0));
    neg32(regT2);
    positiveResult.link(this);

    move(regT2, regT0);

    pop(regT3);
    pop(regT1);
    ret();
}
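
/*
 * For reference, the unrolled compare-and-subtract ladder above computes an
 * unsigned remainder equivalent to this C++ sketch (an illustration written
 * for this comment, not code that ships):
 *
 *   static unsigned remainderByShiftSubtract(unsigned n, unsigned d) // d != 0
 *   {
 *       for (int i = 31; i > 0; --i) {
 *           uint64_t shifted = static_cast<uint64_t>(d) << i;
 *           if (n >= shifted)
 *               n -= static_cast<unsigned>(shifted);
 *       }
 *       if (n >= d) // the final branch32(Below)/sub32 pair handles shift 0
 *           n -= d;
 *       return n;
 *   }
 *
 * The flag bits saved in regT1 record the operands' original signs so the
 * remainder can be negated afterwards, and the countLeadingZeros pair lets
 * the relative table jump skip shift amounts that cannot possibly subtract.
 */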
#endif // ENABLE(JIT_USE_SOFT_MODULO)

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)