/*
 * Copyright (C) 2009, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "JIT.h"

#if ENABLE(JIT)

#include "Arguments.h"
#include "CopiedSpaceInlines.h"
#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE64)
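// Note on the 64-bit value representation assumed throughout this file: the
// immediate constants used below (ValueFalse, ValueTrue, ValueUndefined,
// ValueNull, TagTypeNumber, TagBitUndefined) come from the JSVALUE64 encoding
// defined elsewhere in JavaScriptCore. Roughly: integers carry the full
// TagTypeNumber pattern in their top 16 bits, boxed doubles carry at least one
// of those bits, and the special immediates (false/true/null/undefined) are
// small distinct bit patterns in the low byte. The explanatory comments added
// alongside some opcodes below assume this encoding and are illustrative only.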
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (canBeOptimizedOrInlined()) {
        // Use simpler approach, since the DFG thinks that the last result register
        // is always set to the destination on every operation.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        if (m_codeBlock->isConstantRegisterIndex(src)) {
            if (!getConstantOperand(src).isNumber())
                store64(TrustedImm64(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            else
                store64(Imm64(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            if (dst == m_lastResultBytecodeRegister)
                killLastResultRegister();
        } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
            // If either the src or dst is the cached register go through
            // get/put registers to make sure we track this correctly.
            emitGetVirtualRegister(src, regT0);
            emitPutVirtualRegister(dst);
        } else {
            // Perform the copy via regT1; do not disturb any mapping in regT0.
            load64(Address(callFrameRegister, src * sizeof(Register)), regT1);
            store64(regT1, Address(callFrameRegister, dst * sizeof(Register)));
        }
    }
}
void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, JSStack::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}
void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
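// The fast path above bump-allocates out of the MarkedAllocator selected for the
// profiled structure's inline capacity; emitAllocateJSObject branches to a slow
// case when that allocator cannot satisfy the request, and emitSlow_op_new_object
// below then allocates through the cti_op_new_object stub instead.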
void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_object);
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.objectAllocationProfile->structure()));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
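// For cells, a plain object is never undefined, but an object whose structure has
// the MasqueradesAsUndefined flag (e.g. a document.all-style host object) counts
// as undefined only when observed from its own global object, which is why the
// structure's global object is compared against this code block's above.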
void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
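// tagTypeNumberRegister holds the TagTypeNumber mask: int32 immediates have all of
// those high bits set and boxed doubles have at least one of them set, so a single
// non-zero test against the mask identifies any number. This reading assumes the
// standard JSVALUE64 number boxing described in the note at the top of the file.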
void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}
void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branchTest64(Zero, addressFor(activation));
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.call();
    activationNotCreated.link(this);
}
void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branchTest64(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(arguments))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.addArgument(activation, regT2);
    stubCall.call();
    argsNotCreated.link(this);
}
void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}
void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}
void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
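// Worked example under the usual encoding (ValueFalse = 0x06, ValueTrue = 0x07):
// xor'ing with ValueFalse maps false -> 0 and true -> 1, so any bits outside the
// low bit mean the input was not a boolean and the slow case runs; xor'ing the
// surviving 0/1 with ValueTrue re-tags it as a boolean with the low bit flipped,
// which is exactly !x.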
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // this point in the code should never be reached.
    breakpoint();
#endif
}
void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store64(tagTypeNumberRegister, addressFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}
void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    load64(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}
void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_with_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}
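// The strict-equality fast path only handles immediates: if either operand is a
// cell the slow case runs (strings need a content comparison), and doubles are
// also punted because bit-identical encodings are not the same thing as numeric
// equality (NaN != NaN, while +0 == -0 despite differing bit patterns).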
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_name_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call();
}
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, vm) / sizeof(void*));
    load64(Address(regT3, OBJECT_OFFSETOF(VM, exception)), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(VM, exception)));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
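// On entry to a catch handler the throw machinery leaves the handler frame's
// CallFrame* in regT0, so it is installed as the frame register before the
// pending exception is read out of the VM and the VM's exception slot cleared.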
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
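// The cti_op_switch_imm stub looks the scrutinee up in the jump table (grown above
// to match the bytecode's branch offsets) and returns the machine-code destination
// in regT0, so the generated code finishes with an indirect jump on that result.
// The character and string variants below follow the same pattern.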
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw_static_error);
    if (!m_codeBlock->getConstant(currentInstruction[1].u.operand).isNumber())
        stubCall.addArgument(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    else
        stubCall.addArgument(Imm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.call();
}
void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[4].u.operand));
    stubCall.call();
#endif
}
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_enter(Instruction*)
{
    emitEnterOptimizationCheck();

    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}
void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    emitPutVirtualRegister(dst);
    activationCreated.link(this);
}
void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}
void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    store64(TrustedImm64((int64_t)0), Address(callFrameRegister, sizeof(Register) * dst));
}
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);
    if (shouldEmitProfiling()) {
        loadPtr(Address(regT1, JSCell::structureOffset()), regT0);
        emitValueProfilingSite();
    }
    addSlowCase(branchPtr(Equal, Address(regT1, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
}
void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(result);
}
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
}
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalThis();

    linkSlowCase(iter);
    if (shouldEmitProfiling())
        move(TrustedImm64((JSValue::encode(jsUndefined()))), regT0);
    Jump isNotUndefined = branch64(NotEqual, regT1, TrustedImm64(JSValue::encode(jsUndefined())));
    emitValueProfilingSite();
    move(TrustedImm64(JSValue::encode(JSValue(static_cast<JSCell*>(globalThis)))), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    linkSlowCase(iter);
    if (shouldEmitProfiling())
        move(TrustedImm64(JSValue::encode(m_vm->stringStructure.get())), regT0);
    isNotUndefined.link(this);
    emitValueProfilingSite();
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}
void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call(dst);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}
void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_number);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}
void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}
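// The index is negated before the BaseIndex load, so the effective address is the
// this-argument slot minus index-times-eight; in this frame layout the incoming
// arguments sit at descending slots from that point. The bounds check against
// ArgumentCount above sends out-of-range or non-integer indices to the slow path.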
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.callWithValueProfiling(dst);
}
void JIT::emit_op_put_to_base(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int id = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    PutToBaseOperation* operation = currentInstruction[4].u.putToBaseOperation;
    switch (operation->m_kind) {
    case PutToBaseOperation::GlobalVariablePutChecked:
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(operation->m_predicatePointer)));
    case PutToBaseOperation::GlobalVariablePut: {
        JSGlobalObject* globalObject = m_codeBlock->globalObject();
        if (operation->m_isDynamic) {
            emitGetVirtualRegister(base, regT0);
            addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(globalObject)));
        }
        emitGetVirtualRegister(value, regT0);
        store64(regT0, operation->m_registerAddress);
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        return;
    }
    case PutToBaseOperation::VariablePut: {
        emitGetVirtualRegisters(base, regT0, value, regT1);
        loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT2);
        store64(regT1, Address(regT2, operation->m_offset * sizeof(Register)));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(regT0, regT1, regT2, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        return;
    }

    case PutToBaseOperation::GlobalPropertyPut: {
        emitGetVirtualRegisters(base, regT0, value, regT1);
        loadPtr(&operation->m_structure, regT2);
        addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), regT2));
        ASSERT(!operation->m_structure || !operation->m_structure->inlineCapacity());
        loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
        load32(&operation->m_offsetInButterfly, regT3);
        signExtend32ToPtr(regT3, regT3);
        store64(regT1, BaseIndex(regT2, regT3, TimesEight));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(regT0, regT1, regT2, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        return;
    }

    case PutToBaseOperation::Uninitialised:
    case PutToBaseOperation::Readonly:
    case PutToBaseOperation::Generic:
        JITStubCall stubCall(this, cti_op_put_to_base);

        stubCall.addArgument(TrustedImm32(base));
        stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(id)));
        stubCall.addArgument(TrustedImm32(value));
        stubCall.addArgument(TrustedImmPtr(operation));
        stubCall.call();
        return;
    }
}
#endif // USE(JSVALUE64)
void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized())
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));

    // Emit the watchdog timer check:
    if (m_vm->watchdog.isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog.timerDidFireAddress())));
}
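// The tier-up check relies on the code block's execute counter starting out
// negative: each loop hint adds executionCounterIncrementForLoop(), and the
// PositiveOrZero branch fires once the counter crosses zero, sending control to
// the cti_optimize slow path below. The watchdog check just tests the timer-fired
// flag byte.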
void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        JITStubCall stubCall(this, cti_optimize);
        stubCall.addArgument(TrustedImm32(m_bytecodeOffset));
        stubCall.call();

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog.isEnabled()) {
        linkSlowCase(iter);

        JITStubCall stubCall(this, cti_handle_watchdog_timer);
        stubCall.call();

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}
void JIT::emit_resolve_operations(ResolveOperations* resolveOperations, const int* baseVR, const int* valueVR)
{

#if USE(JSVALUE32_64)
    unmap();
#else
    killLastResultRegister();
#endif

    if (resolveOperations->isEmpty()) {
        addSlowCase(jump());
        return;
    }

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif
    const RegisterID scope = regT2;
    const RegisterID scratch = regT3;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ResolveOperation* pc = resolveOperations->data();
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, scope);
    bool setBase = false;
    bool resolvingBase = true;
    while (resolvingBase) {
        switch (pc->m_operation) {
        case ResolveOperation::ReturnGlobalObjectAsBase:
            move(TrustedImmPtr(globalObject), value);
#if USE(JSVALUE32_64)
            move(TrustedImm32(JSValue::CellTag), valueTag);
#endif
            emitValueProfilingSite();
            emitStoreCell(*baseVR, value);
            return;
        case ResolveOperation::SetBaseToGlobal:
            RELEASE_ASSERT(baseVR);
            setBase = true;
            move(TrustedImmPtr(globalObject), scratch);
            emitStoreCell(*baseVR, scratch);
            resolvingBase = false;
            ++pc;
            break;
        case ResolveOperation::SetBaseToUndefined: {
            RELEASE_ASSERT(baseVR);
            setBase = true;
#if USE(JSVALUE64)
            move(TrustedImm64(JSValue::encode(jsUndefined())), scratch);
            emitPutVirtualRegister(*baseVR, scratch);
#else
            emitStore(*baseVR, jsUndefined());
#endif
            resolvingBase = false;
            ++pc;
            break;
        }
        case ResolveOperation::SetBaseToScope:
            RELEASE_ASSERT(baseVR);
            setBase = true;
            emitStoreCell(*baseVR, scope);
            resolvingBase = false;
            ++pc;
            break;
        case ResolveOperation::ReturnScopeAsBase:
            emitStoreCell(*baseVR, scope);
            RELEASE_ASSERT(value == regT0);
            move(scope, value);
#if USE(JSVALUE32_64)
            move(TrustedImm32(JSValue::CellTag), valueTag);
#endif
            emitValueProfilingSite();
            return;
        case ResolveOperation::SkipTopScopeNode: {
#if USE(JSVALUE32_64)
            Jump activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
#else
            Jump activationNotCreated = branchTest64(Zero, addressFor(m_codeBlock->activationRegister()));
#endif
            loadPtr(Address(scope, JSScope::offsetOfNext()), scope);
            activationNotCreated.link(this);
            ++pc;
            break;
        }
        case ResolveOperation::CheckForDynamicEntriesBeforeGlobalScope: {
            move(scope, regT3);
            loadPtr(Address(regT3, JSScope::offsetOfNext()), regT1);
            Jump atTopOfScope = branchTestPtr(Zero, regT1);
            Label loopStart = label();
            loadPtr(Address(regT3, JSCell::structureOffset()), regT2);
            Jump isActivation = branchPtr(Equal, regT2, TrustedImmPtr(globalObject->activationStructure()));
            addSlowCase(branchPtr(NotEqual, regT2, TrustedImmPtr(globalObject->nameScopeStructure())));
            isActivation.link(this);
            move(regT1, regT3);
            loadPtr(Address(regT3, JSScope::offsetOfNext()), regT1);
            branchTestPtr(NonZero, regT1).linkTo(loopStart, this);
            atTopOfScope.link(this);
            ++pc;
            break;
        }
        case ResolveOperation::SkipScopes: {
            for (int i = 0; i < pc->m_scopesToSkip; i++)
                loadPtr(Address(scope, JSScope::offsetOfNext()), scope);
            ++pc;
            break;
        }
        case ResolveOperation::Fail:
            addSlowCase(jump());
            return;
        default:
            resolvingBase = false;
        }
    }
    if (baseVR && !setBase)
        emitStoreCell(*baseVR, scope);

    RELEASE_ASSERT(valueVR);
    ResolveOperation* resolveValueOperation = pc;
    switch (resolveValueOperation->m_operation) {
    case ResolveOperation::GetAndReturnGlobalProperty: {
        // Verify structure.
        move(TrustedImmPtr(globalObject), regT2);
        move(TrustedImmPtr(resolveValueOperation), regT3);
        loadPtr(Address(regT3, OBJECT_OFFSETOF(ResolveOperation, m_structure)), regT1);
        addSlowCase(branchPtr(NotEqual, regT1, Address(regT2, JSCell::structureOffset())));

        // Load the property offset.
        load32(Address(regT3, OBJECT_OFFSETOF(ResolveOperation, m_offset)), regT3);

        // regT2: GlobalObject
        // regT3: offset
#if USE(JSVALUE32_64)
        compileGetDirectOffset(regT2, valueTag, value, regT3, KnownNotFinal);
#else
        compileGetDirectOffset(regT2, value, regT3, regT1, KnownNotFinal);
#endif
        break;
    }
    case ResolveOperation::GetAndReturnGlobalVarWatchable:
    case ResolveOperation::GetAndReturnGlobalVar: {
#if USE(JSVALUE32_64)
        load32(reinterpret_cast<char*>(pc->m_registerAddress) + OBJECT_OFFSETOF(JSValue, u.asBits.tag), valueTag);
        load32(reinterpret_cast<char*>(pc->m_registerAddress) + OBJECT_OFFSETOF(JSValue, u.asBits.payload), value);
#else
        load64(reinterpret_cast<char*>(pc->m_registerAddress), value);
#endif
        break;
    }
    case ResolveOperation::GetAndReturnScopedVar: {
        loadPtr(Address(scope, JSVariableObject::offsetOfRegisters()), scope);
#if USE(JSVALUE32_64)
        load32(Address(scope, pc->m_offset * sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), valueTag);
        load32(Address(scope, pc->m_offset * sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
#else
        load64(Address(scope, pc->m_offset * sizeof(Register)), value);
#endif
        break;
    }
    default:
        CRASH();
        return;
    }

#if USE(JSVALUE32_64)
    emitStore(*valueVR, valueTag, value);
#else
    emitPutVirtualRegister(*valueVR, value);
#endif
    emitValueProfilingSite();
}
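// emit_resolve_operations runs in two phases that mirror the ResolveOperations
// list: the while loop above materialises the base (global object, scope, or
// undefined) while walking the scope chain, and the trailing switch loads the
// resolved value from a global property, a global variable address, or a scoped
// variable slot. Anything the baseline JIT cannot handle inline is routed to a
// slow case, which emitSlow_link_resolve_operations below links up in the same
// order before falling back to the generic resolve stubs.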
void JIT::emitSlow_link_resolve_operations(ResolveOperations* resolveOperations, Vector<SlowCaseEntry>::iterator& iter)
{
    if (resolveOperations->isEmpty()) {
        linkSlowCase(iter);
        return;
    }

    ResolveOperation* pc = resolveOperations->data();
    bool resolvingBase = true;
    while (resolvingBase) {
        switch (pc->m_operation) {
        case ResolveOperation::ReturnGlobalObjectAsBase:
            return;
        case ResolveOperation::SetBaseToGlobal:
            resolvingBase = false;
            ++pc;
            break;
        case ResolveOperation::SetBaseToUndefined: {
            resolvingBase = false;
            ++pc;
            break;
        }
        case ResolveOperation::SetBaseToScope:
            resolvingBase = false;
            ++pc;
            break;
        case ResolveOperation::ReturnScopeAsBase:
            return;
        case ResolveOperation::SkipTopScopeNode: {
            ++pc;
            break;
        }
        case ResolveOperation::SkipScopes:
            ++pc;
            break;
        case ResolveOperation::Fail:
            linkSlowCase(iter);
            return;
        case ResolveOperation::CheckForDynamicEntriesBeforeGlobalScope: {
            linkSlowCase(iter);
            ++pc;
            break;
        }
        default:
            resolvingBase = false;
        }
    }
    ResolveOperation* resolveValueOperation = pc;
    switch (resolveValueOperation->m_operation) {
    case ResolveOperation::GetAndReturnGlobalProperty: {
        linkSlowCase(iter);
        break;
    }
    case ResolveOperation::GetAndReturnGlobalVarWatchable:
    case ResolveOperation::GetAndReturnGlobalVar:
        break;
    case ResolveOperation::GetAndReturnScopedVar:
        break;
    default:
        CRASH();
        return;
    }
}
void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    ResolveOperations* operations = currentInstruction[3].u.resolveOperations;
    int dst = currentInstruction[1].u.operand;
    emit_resolve_operations(operations, 0, &dst);
}

void JIT::emitSlow_op_resolve(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveOperations* operations = currentInstruction[3].u.resolveOperations;
    emitSlow_link_resolve_operations(operations, iter);
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.resolveOperations));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}
void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
    int dst = currentInstruction[1].u.operand;
    emit_resolve_operations(operations, &dst, 0);
}

void JIT::emitSlow_op_resolve_base(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
    emitSlow_link_resolve_operations(operations, iter);
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.resolveOperations));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[5].u.putToBaseOperation));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}
void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    emit_resolve_operations(operations, &base, &value);
}

void JIT::emitSlow_op_resolve_with_base(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
    emitSlow_link_resolve_operations(operations, iter);
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.resolveOperations));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[5].u.putToBaseOperation));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}
void JIT::emit_op_resolve_with_this(Instruction* currentInstruction)
{
    ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    emit_resolve_operations(operations, &base, &value);
}

void JIT::emitSlow_op_resolve_with_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
    emitSlow_link_resolve_operations(operations, iter);
    JITStubCall stubCall(this, cti_op_resolve_with_this);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.resolveOperations));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}
void JIT::emitSlow_op_put_to_base(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int id = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    PutToBaseOperation* putToBaseOperation = currentInstruction[4].u.putToBaseOperation;
    switch (putToBaseOperation->m_kind) {
    case PutToBaseOperation::VariablePut:
        return;

    case PutToBaseOperation::GlobalVariablePutChecked:
        linkSlowCase(iter);
    case PutToBaseOperation::GlobalVariablePut:
        if (!putToBaseOperation->m_isDynamic)
            return;
        linkSlowCase(iter);
        break;

    case PutToBaseOperation::Uninitialised:
    case PutToBaseOperation::Readonly:
    case PutToBaseOperation::Generic:
        return;

    case PutToBaseOperation::GlobalPropertyPut:
        linkSlowCase(iter);
        break;

    }

    JITStubCall stubCall(this, cti_op_put_to_base);

    stubCall.addArgument(TrustedImm32(base));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(id)));
    stubCall.addArgument(TrustedImm32(value));
    stubCall.addArgument(TrustedImmPtr(putToBaseOperation));
    stubCall.call();
}
void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTest64(NonZero, addressFor(dst));
#endif
    }

    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(dst);

    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        unmap();
#else
        killLastResultRegister();
#endif
        lazyJump.link(this);
    }
}
void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_with_size);
#if USE(JSVALUE64)
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
#else
    stubCall.addArgument(currentInstruction[2].u.operand);
#endif
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}
void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_buffer);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}
} // namespace JSC

#endif // ENABLE(JIT)