/*
 * Copyright (C) 2009, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "JSVariableObject.h"
#include "LinkBuffer.h"

namespace JSC {
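// A note on the value representation this file assumes (USE(JSVALUE32_64)):
// each JSValue is a 64-bit tag/payload pair. A minimal sketch of the tag
// layout, as defined in JSCJSValue.h (reproduced here for orientation only;
// the header is authoritative):
//
//     Int32Tag        = 0xffffffff   // payload is the int32 itself
//     BooleanTag      = 0xfffffffe   // payload is 0 or 1
//     NullTag         = 0xfffffffd
//     UndefinedTag    = 0xfffffffc
//     CellTag         = 0xfffffffb   // payload is a JSCell*
//     EmptyValueTag   = 0xfffffffa
//     DeletedValueTag = 0xfffffff9   // == LowestTag
//
// Any tag numerically below LowestTag is the high 32 bits of an IEEE double,
// which is what makes the unsigned tag comparisons against LowestTag below work.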
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction func)
{
    Call nativeCall;

    emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock);
    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    // The return address is on the machine stack; grab it and store it in the call frame.
    peek(regT1);
    emitPutToCallFrameHeader(regT1, JSStack::ReturnPC);

    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // Call the C function; the call is linked to 'func' below.
    nativeCall = call();

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, JSStack::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // Call the C function; the call is linked to 'func' below.
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, JSStack::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    move(callFrameRegister, MIPSRegisters::a0);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // Call the C function; the call is linked to 'func' below.
    nativeCall = call();

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, JSStack::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // Call the C function; the call is linked to 'func' below.
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#else
#error "JIT not supported on this platform."
    breakpoint();
#endif // CPU(X86)

    // Check for an exception.
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&vm->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception.
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&vm->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_vm->topCallFrame);
    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_vm, this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return FINALIZE_CODE(patchBuffer, ("JIT CTI native call"));
}
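// In outline, the thunk assembled above behaves like the following pseudo-C
// (an illustrative sketch only; 'func' is the NativeFunction that
// patchBuffer.link(nativeCall, ...) bound into the generated code):
//
//     exec->setCodeBlock(0);
//     vm->topCallFrame = exec;
//     exec->setScopeChain(callerFrame->scopeChain()); // per-CPU setup above
//     func(exec);                                     // the linked native call
//     if (vm->exception)
//         goto ctiVMThrowTrampoline;                  // via the patched return address
//     return;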
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}
void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, JSStack::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}
void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}
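// emitAllocateJSObject above emits the inline allocation fast path. As a
// C-like sketch of what the generated code does (hypothetical field names;
// the real free-list layout lives in MarkedAllocator/MarkedBlock):
//
//     JSObject* result = allocator->freeListHead;   // pop the free list
//     if (!result)
//         goto slowCase;                            // emitSlow_op_new_object below
//     allocator->freeListHead = result->next;
//     result->setStructure(structure);              // initialize the cell header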
void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_object);
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.objectAllocationProfile->structure()));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[3].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object.
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);

    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}
void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.call(dst);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();

    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}
void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
    emitStoreBool(dst, regT0);
}
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}
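// Note on the add32/compare32 pair above: with the tag layout sketched at the
// top of this file, adding 1 wraps Int32Tag (0xffffffff) to 0 and moves every
// double tag (anything below LowestTag) to at most LowestTag, so a single
// unsigned "Below LowestTag + 1" compare accepts exactly the int32 and double
// tags - i.e. "is a number" - without a second branch.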
void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}
void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branch32(Equal, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation);
    stubCall.call();
    activationNotCreated.link(this);
}
void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(arguments)), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments));
    stubCall.addArgument(activation);
    stubCall.call();
    argsNotCreated.link(this);
}
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}
void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}
void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}
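// Note on the ASSERT/branch pair above (op_jtrue below uses the same trick):
// BooleanTag and Int32Tag are the two largest tag values and Int32Tag + 1
// wraps to 0, so one unsigned "Below BooleanTag" test rejects everything that
// is not a boolean or an int32. For both accepted types a zero payload is
// exactly JS falsiness; doubles, strings, and other cells take the slow case.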
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}
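// Note on the or32 above (op_jneq_null below relies on the same property):
// UndefinedTag and NullTag differ only in the low bit, as the ASSERT checks,
// so or32(1, tag) collapses both to NullTag and a single compare answers
// "is null or undefined".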
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
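// The three slow cases above cover, in order: operands with different tags
// (which may still be equal, e.g. int32 5 == double 5.0), two cells (strings
// need a content comparison), and two doubles (their payload bits are not
// meaningful as int32). Every remaining pair is equal iff the payloads match.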
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    emitStoreBool(dst, regT0);
}
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), regT0);
    emitStoreBool(dst, regT0);
}
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings.
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstNotString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    notCell.link(this);
    firstNotString.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
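// Why the bare payload compare above is sound: the slow cases already
// excluded mismatched tags, doubles, and the string/string pair, so the two
// values are either both int32/boolean/null/undefined - where payload
// equality is value equality - or both cells of which at most one is a
// string, where pointer equality is exactly strict equality.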
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}
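// In the immediate paths of op_eq_null/op_neq_null above, the two compare32
// results are combined with or32 (null OR undefined) and and32 (not-null AND
// not-undefined) respectively; the cell paths instead answer through the
// MasqueradesAsUndefined check against the current global object.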
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}
void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(TrustedImm32(Int32Tag), intTagFor(i));
    store32(TrustedImm32(0), intPayloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, payloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}
void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab the key at index i.
    loadPtr(payloadFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i.
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    loadPtr(payloadFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
    loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}
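// The fast path above is only valid while the base object - and every object
// on its prototype chain - still has the Structure the iterator was built
// with; that is what the m_cachedStructure and m_cachedPrototypeChain walks
// verify. On any mismatch we fall back to cti_has_property, which re-asks the
// object whether the i'th cached name is still present.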
void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_with_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}
void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}
void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_number), dst, regT1, regT0);
}
void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_number);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}
void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_name_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
    stubCall.addArgument(currentInstruction[2].u.operand);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call();
}
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // cti_op_throw returns the callFrame for the handler.
    move(regT0, callFrameRegister);

    // Now store the exception returned by cti_op_throw.
    loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, vm)), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create a jump table for the switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create a jump table for the switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create a jump table for the switch destinations, and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    unsigned message = currentInstruction[1].u.operand;

    JITStubCall stubCall(this, cti_op_throw_static_error);
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.call();
}
void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[4].u.operand));
    stubCall.call();
#endif
}
void JIT::emit_op_enter(Instruction*)
{
    emitEnterOptimizationCheck();

    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}
void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;

    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall(this, cti_op_push_activation).call(activation);
    activationCreated.link(this);
}
void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(dst, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);

    argsCreated.link(this);
}
void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitStore(dst, JSValue());
}
void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitLoadPayload(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}
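// op_create_this allocates through the callee's ObjectAllocationProfile, so
// repeated 'new F()' calls reuse the allocator and Structure cached on the
// JSFunction. A null allocator (e.g. a profile that has not been primed yet)
// fails the branchTestPtr above and defers to cti_op_create_this below.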
void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    if (shouldEmitProfiling()) {
        loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
        move(TrustedImm32(JSValue::CellTag), regT1);
        emitValueProfilingSite();
    }
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
}
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalThis();
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    if (shouldEmitProfiling()) {
        move(TrustedImm32(JSValue::UndefinedTag), regT1);
        move(TrustedImm32(0), regT0);
    }
    Jump isNotUndefined = branch32(NotEqual, regT3, TrustedImm32(JSValue::UndefinedTag));
    emitValueProfilingSite();
    move(TrustedImmPtr(globalThis), regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitStore(thisRegister, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    linkSlowCase(iter);
    if (shouldEmitProfiling()) {
        move(TrustedImm32(JSValue::CellTag), regT1);
        move(TrustedImmPtr(m_vm->stringStructure.get()), regT0);
    }
    isNotUndefined.link(this);
    emitValueProfilingSite();
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT3, regT2);
    stubCall.call(thisRegister);
}
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}
void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    load32(payloadFor(JSStack::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    emitStoreInt32(dst, regT0);
}
void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.call(dst);
}
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including this.
    load32(payloadFor(JSStack::ArgumentCount), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

    neg32(regT2);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT1);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}
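// Note on the indexing above, assuming the usual JSC frame layout in which
// arguments are stored at negative offsets from the callee's call frame:
// regT2 holds the wanted index plus one ('this' counts as argument zero,
// hence the add32), and is negated so that the BaseIndex address walks
// backwards from the thisArgumentOffset() displacement to the right slot.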
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(arguments, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(arguments);
    stubCall.addArgument(property);
    stubCall.callWithValueProfiling(dst);
}
void JIT::emit_op_put_to_base(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int id = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    PutToBaseOperation* operation = currentInstruction[4].u.putToBaseOperation;

    switch (operation->m_kind) {
    case PutToBaseOperation::GlobalVariablePutChecked:
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(operation->m_predicatePointer)));
        // Fall through to the unchecked put.
    case PutToBaseOperation::GlobalVariablePut: {
        JSGlobalObject* globalObject = m_codeBlock->globalObject();
        if (operation->m_isDynamic)
            addSlowCase(branchPtr(NotEqual, payloadFor(base), TrustedImmPtr(globalObject)));

        emitLoad(value, regT1, regT0);
        storePtr(regT0, reinterpret_cast<char*>(operation->m_registerAddress) + OBJECT_OFFSETOF(JSValue, u.asBits.payload));
        storePtr(regT1, reinterpret_cast<char*>(operation->m_registerAddress) + OBJECT_OFFSETOF(JSValue, u.asBits.tag));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        break;
    }
    case PutToBaseOperation::VariablePut: {
        loadPtr(payloadFor(base), regT3);
        emitLoad(value, regT1, regT0);
        loadPtr(Address(regT3, JSVariableObject::offsetOfRegisters()), regT2);
        store32(regT0, Address(regT2, operation->m_offset * sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        store32(regT1, Address(regT2, operation->m_offset * sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(regT3, regT1, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        break;
    }
    case PutToBaseOperation::GlobalPropertyPut: {
        JSGlobalObject* globalObject = m_codeBlock->globalObject();
        loadPtr(payloadFor(base), regT3);
        emitLoad(value, regT1, regT0);
        loadPtr(&operation->m_structure, regT2);
        addSlowCase(branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), regT2));
        ASSERT(!operation->m_structure || !operation->m_structure->inlineCapacity());
        loadPtr(Address(regT3, JSObject::butterflyOffset()), regT2);
        load32(&operation->m_offsetInButterfly, regT3);
        storePtr(regT0, BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        storePtr(regT1, BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(globalObject, regT1, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        break;
    }
    case PutToBaseOperation::Uninitialised:
    case PutToBaseOperation::Readonly:
    case PutToBaseOperation::Generic:
        JITStubCall stubCall(this, cti_op_put_to_base);

        stubCall.addArgument(TrustedImm32(base));
        stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(id)));
        stubCall.addArgument(TrustedImm32(value));
        stubCall.addArgument(TrustedImmPtr(operation));
        stubCall.call();
        break;
    }
}
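// Summary of the op_put_to_base fast paths above: global variable puts store
// straight through the register address baked into the operation (with an
// extra invalidation check for the *PutChecked form), scoped variable puts go
// through JSVariableObject::offsetOfRegisters(), and global property puts are
// guarded by a Structure check before writing into the butterfly. Readonly,
// uninitialised, and generic cases all defer to the cti_op_put_to_base stub.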
} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)