/*
 * Copyright (C) 2009, 2012, 2013, 2014 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
33 #include "CCallHelpers.h"
35 #include "JITInlines.h"
38 #include "JSFunction.h"
39 #include "JSPropertyNameIterator.h"
40 #include "JSVariableObject.h"
41 #include "LinkBuffer.h"
42 #include "MaxFrameExtentForSlowPathCall.h"
43 #include "SlowPathCall.h"
44 #include "VirtualRegister.h"
48 JIT::CodeRef
JIT::privateCompileCTINativeCall(VM
* vm
, NativeFunction func
)
52 emitFunctionPrologue();
53 emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock
);
54 storePtr(callFrameRegister
, &m_vm
->topCallFrame
);
57 // Load caller frame's scope chain into this callframe so that whatever we call can
58 // get to its global data.
59 emitGetCallerFrameFromCallFrameHeaderPtr(regT0
);
60 emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain
, regT1
, regT0
);
61 emitPutCellToCallFrameHeader(regT1
, JSStack::ScopeChain
);
63 // Calling convention: f(ecx, edx, ...);
64 // Host function signature: f(ExecState*);
65 move(callFrameRegister
, X86Registers::ecx
);
67 subPtr(TrustedImm32(8), stackPointerRegister
); // Align stack for call.
68 storePtr(X86Registers::ecx
, Address(stackPointerRegister
));
73 addPtr(TrustedImm32(8), stackPointerRegister
);
75 #elif CPU(ARM) || CPU(SH4) || CPU(MIPS)
76 // Load caller frame's scope chain into this callframe so that whatever we call can get to its global data.
77 emitGetCallerFrameFromCallFrameHeaderPtr(regT2
);
78 emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain
, regT1
, regT2
);
79 emitPutCellToCallFrameHeader(regT1
, JSStack::ScopeChain
);
82 // Allocate stack space for (unused) 16 bytes (8-byte aligned) for 4 arguments.
83 subPtr(TrustedImm32(16), stackPointerRegister
);
86 // Calling convention is f(argumentGPR0, argumentGPR1, ...).
87 // Host function signature is f(ExecState*).
88 move(callFrameRegister
, argumentGPR0
);
90 emitGetFromCallFrameHeaderPtr(JSStack::Callee
, argumentGPR1
);
91 loadPtr(Address(argumentGPR1
, OBJECT_OFFSETOF(JSFunction
, m_executable
)), regT2
);
97 // Restore stack space
98 addPtr(TrustedImm32(16), stackPointerRegister
);
101 restoreReturnAddressBeforeReturn(regT3
);
103 #error "JIT not supported on this platform."
104 abortWithReason(JITNotSupported
);
107 // Check for an exception
108 Jump sawException
= branch32(NotEqual
, AbsoluteAddress(reinterpret_cast<char*>(vm
->addressOfException()) + OBJECT_OFFSETOF(JSValue
, u
.asBits
.tag
)), TrustedImm32(JSValue::EmptyValueTag
));
110 emitFunctionEpilogue();
114 // Handle an exception
115 sawException
.link(this);
117 storePtr(callFrameRegister
, &m_vm
->topCallFrame
);
120 addPtr(TrustedImm32(-4), stackPointerRegister
);
121 loadPtr(Address(callFrameRegister
), X86Registers::ecx
);
122 push(X86Registers::ecx
);
124 loadPtr(Address(callFrameRegister
), argumentGPR0
);
126 move(TrustedImmPtr(FunctionPtr(operationVMHandleException
).value()), regT3
);
130 addPtr(TrustedImm32(8), stackPointerRegister
);
133 jumpToExceptionHandler();
135 // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
136 LinkBuffer
patchBuffer(*m_vm
, *this, GLOBAL_THUNK_ID
);
138 patchBuffer
.link(nativeCall
, FunctionPtr(func
));
139 return FINALIZE_CODE(patchBuffer
, ("JIT CTI native call"));
142 void JIT::emit_op_mov(Instruction
* currentInstruction
)
144 int dst
= currentInstruction
[1].u
.operand
;
145 int src
= currentInstruction
[2].u
.operand
;
147 if (m_codeBlock
->isConstantRegisterIndex(src
))
148 emitStore(dst
, getConstantOperand(src
));
150 emitLoad(src
, regT1
, regT0
);
151 emitStore(dst
, regT1
, regT0
);
155 void JIT::emit_op_captured_mov(Instruction
* currentInstruction
)
157 int dst
= currentInstruction
[1].u
.operand
;
158 int src
= currentInstruction
[2].u
.operand
;
160 emitLoad(src
, regT1
, regT0
);
161 emitNotifyWrite(regT1
, regT0
, regT2
, currentInstruction
[3].u
.watchpointSet
);
162 emitStore(dst
, regT1
, regT0
);
165 void JIT::emit_op_end(Instruction
* currentInstruction
)
167 ASSERT(returnValueGPR
!= callFrameRegister
);
168 emitLoad(currentInstruction
[1].u
.operand
, regT1
, regT0
);
169 emitFunctionEpilogue();
173 void JIT::emit_op_jmp(Instruction
* currentInstruction
)
175 unsigned target
= currentInstruction
[1].u
.operand
;
176 addJump(jump(), target
);
179 void JIT::emit_op_new_object(Instruction
* currentInstruction
)
181 Structure
* structure
= currentInstruction
[3].u
.objectAllocationProfile
->structure();
182 size_t allocationSize
= JSFinalObject::allocationSize(structure
->inlineCapacity());
183 MarkedAllocator
* allocator
= &m_vm
->heap
.allocatorForObjectWithoutDestructor(allocationSize
);
185 RegisterID resultReg
= regT0
;
186 RegisterID allocatorReg
= regT1
;
187 RegisterID scratchReg
= regT2
;
189 move(TrustedImmPtr(allocator
), allocatorReg
);
190 emitAllocateJSObject(allocatorReg
, TrustedImmPtr(structure
), resultReg
, scratchReg
);
191 emitStoreCell(currentInstruction
[1].u
.operand
, resultReg
);
194 void JIT::emitSlow_op_new_object(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
197 int dst
= currentInstruction
[1].u
.operand
;
198 Structure
* structure
= currentInstruction
[3].u
.objectAllocationProfile
->structure();
199 callOperation(operationNewObject
, structure
);
200 emitStoreCell(dst
, returnValueGPR
);
203 void JIT::emit_op_check_has_instance(Instruction
* currentInstruction
)
205 int baseVal
= currentInstruction
[3].u
.operand
;
207 emitLoadPayload(baseVal
, regT0
);
209 // Check that baseVal is a cell.
210 emitJumpSlowCaseIfNotJSCell(baseVal
);
212 // Check that baseVal 'ImplementsHasInstance'.
213 addSlowCase(branchTest8(Zero
, Address(regT0
, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance
)));
216 void JIT::emit_op_instanceof(Instruction
* currentInstruction
)
218 int dst
= currentInstruction
[1].u
.operand
;
219 int value
= currentInstruction
[2].u
.operand
;
220 int proto
= currentInstruction
[3].u
.operand
;
222 // Load the operands into registers.
223 // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
224 emitLoadPayload(value
, regT2
);
225 emitLoadPayload(proto
, regT1
);
227 // Check that proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
228 emitJumpSlowCaseIfNotJSCell(value
);
229 emitJumpSlowCaseIfNotJSCell(proto
);
231 // Check that prototype is an object
232 addSlowCase(emitJumpIfCellNotObject(regT1
));
234 // Optimistically load the result true, and start looping.
235 // Initially, regT1 still contains proto and regT2 still contains value.
236 // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
237 move(TrustedImm32(1), regT0
);
240 // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
241 // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
242 loadPtr(Address(regT2
, JSCell::structureIDOffset()), regT2
);
243 load32(Address(regT2
, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue
, u
.asBits
.payload
)), regT2
);
244 Jump isInstance
= branchPtr(Equal
, regT2
, regT1
);
245 branchTest32(NonZero
, regT2
).linkTo(loop
, this);
247 // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
248 move(TrustedImm32(0), regT0
);
250 // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
251 isInstance
.link(this);
252 emitStoreBool(dst
, regT0
);
255 void JIT::emitSlow_op_check_has_instance(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
257 int dst
= currentInstruction
[1].u
.operand
;
258 int value
= currentInstruction
[2].u
.operand
;
259 int baseVal
= currentInstruction
[3].u
.operand
;
261 linkSlowCaseIfNotJSCell(iter
, baseVal
);
264 emitLoad(value
, regT1
, regT0
);
265 emitLoad(baseVal
, regT3
, regT2
);
266 callOperation(operationCheckHasInstance
, dst
, regT1
, regT0
, regT3
, regT2
);
268 emitJumpSlowToHot(jump(), currentInstruction
[4].u
.operand
);
271 void JIT::emitSlow_op_instanceof(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
273 int dst
= currentInstruction
[1].u
.operand
;
274 int value
= currentInstruction
[2].u
.operand
;
275 int proto
= currentInstruction
[3].u
.operand
;
277 linkSlowCaseIfNotJSCell(iter
, value
);
278 linkSlowCaseIfNotJSCell(iter
, proto
);
281 emitLoad(value
, regT1
, regT0
);
282 emitLoad(proto
, regT3
, regT2
);
283 callOperation(operationInstanceOf
, dst
, regT1
, regT0
, regT3
, regT2
);
286 void JIT::emit_op_is_undefined(Instruction
* currentInstruction
)
288 int dst
= currentInstruction
[1].u
.operand
;
289 int value
= currentInstruction
[2].u
.operand
;
291 emitLoad(value
, regT1
, regT0
);
292 Jump isCell
= branch32(Equal
, regT1
, TrustedImm32(JSValue::CellTag
));
294 compare32(Equal
, regT1
, TrustedImm32(JSValue::UndefinedTag
), regT0
);
298 Jump isMasqueradesAsUndefined
= branchTest8(NonZero
, Address(regT0
, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined
));
299 move(TrustedImm32(0), regT0
);
300 Jump notMasqueradesAsUndefined
= jump();
302 isMasqueradesAsUndefined
.link(this);
303 loadPtr(Address(regT0
, JSCell::structureIDOffset()), regT1
);
304 move(TrustedImmPtr(m_codeBlock
->globalObject()), regT0
);
305 loadPtr(Address(regT1
, Structure::globalObjectOffset()), regT1
);
306 compare32(Equal
, regT0
, regT1
, regT0
);
308 notMasqueradesAsUndefined
.link(this);
310 emitStoreBool(dst
, regT0
);
313 void JIT::emit_op_is_boolean(Instruction
* currentInstruction
)
315 int dst
= currentInstruction
[1].u
.operand
;
316 int value
= currentInstruction
[2].u
.operand
;
318 emitLoadTag(value
, regT0
);
319 compare32(Equal
, regT0
, TrustedImm32(JSValue::BooleanTag
), regT0
);
320 emitStoreBool(dst
, regT0
);
323 void JIT::emit_op_is_number(Instruction
* currentInstruction
)
325 int dst
= currentInstruction
[1].u
.operand
;
326 int value
= currentInstruction
[2].u
.operand
;
328 emitLoadTag(value
, regT0
);
329 add32(TrustedImm32(1), regT0
);
330 compare32(Below
, regT0
, TrustedImm32(JSValue::LowestTag
+ 1), regT0
);
331 emitStoreBool(dst
, regT0
);
334 void JIT::emit_op_is_string(Instruction
* currentInstruction
)
336 int dst
= currentInstruction
[1].u
.operand
;
337 int value
= currentInstruction
[2].u
.operand
;
339 emitLoad(value
, regT1
, regT0
);
340 Jump isNotCell
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
342 compare8(Equal
, Address(regT0
, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType
), regT0
);
345 isNotCell
.link(this);
346 move(TrustedImm32(0), regT0
);
349 emitStoreBool(dst
, regT0
);
352 void JIT::emit_op_tear_off_activation(Instruction
* currentInstruction
)
354 int activation
= currentInstruction
[1].u
.operand
;
355 Jump activationNotCreated
= branch32(Equal
, tagFor(activation
), TrustedImm32(JSValue::EmptyValueTag
));
356 emitLoadPayload(activation
, regT0
);
357 callOperation(operationTearOffActivation
, regT0
);
358 activationNotCreated
.link(this);
361 void JIT::emit_op_tear_off_arguments(Instruction
* currentInstruction
)
363 VirtualRegister arguments
= VirtualRegister(currentInstruction
[1].u
.operand
);
364 int activation
= currentInstruction
[2].u
.operand
;
366 Jump argsNotCreated
= branch32(Equal
, tagFor(unmodifiedArgumentsRegister(arguments
).offset()), TrustedImm32(JSValue::EmptyValueTag
));
367 emitLoadPayload(unmodifiedArgumentsRegister(VirtualRegister(arguments
)).offset(), regT0
);
368 emitLoadPayload(activation
, regT1
);
369 callOperation(operationTearOffArguments
, regT0
, regT1
);
370 argsNotCreated
.link(this);
373 void JIT::emit_op_to_primitive(Instruction
* currentInstruction
)
375 int dst
= currentInstruction
[1].u
.operand
;
376 int src
= currentInstruction
[2].u
.operand
;
378 emitLoad(src
, regT1
, regT0
);
380 Jump isImm
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
381 addSlowCase(branchPtr(NotEqual
, Address(regT0
, JSCell::structureIDOffset()), TrustedImmPtr(m_vm
->stringStructure
.get())));
385 emitStore(dst
, regT1
, regT0
);
388 void JIT::emitSlow_op_to_primitive(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
392 JITSlowPathCall
slowPathCall(this, currentInstruction
, slow_path_to_primitive
);
396 void JIT::emit_op_strcat(Instruction
* currentInstruction
)
398 JITSlowPathCall
slowPathCall(this, currentInstruction
, slow_path_strcat
);
402 void JIT::emit_op_not(Instruction
* currentInstruction
)
404 int dst
= currentInstruction
[1].u
.operand
;
405 int src
= currentInstruction
[2].u
.operand
;
407 emitLoadTag(src
, regT0
);
409 emitLoad(src
, regT1
, regT0
);
410 addSlowCase(branch32(NotEqual
, regT1
, TrustedImm32(JSValue::BooleanTag
)));
411 xor32(TrustedImm32(1), regT0
);
413 emitStoreBool(dst
, regT0
, (dst
== src
));
416 void JIT::emitSlow_op_not(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
420 JITSlowPathCall
slowPathCall(this, currentInstruction
, slow_path_not
);
424 void JIT::emit_op_jfalse(Instruction
* currentInstruction
)
426 int cond
= currentInstruction
[1].u
.operand
;
427 unsigned target
= currentInstruction
[2].u
.operand
;
429 emitLoad(cond
, regT1
, regT0
);
431 ASSERT((JSValue::BooleanTag
+ 1 == JSValue::Int32Tag
) && !(JSValue::Int32Tag
+ 1));
432 addSlowCase(branch32(Below
, regT1
, TrustedImm32(JSValue::BooleanTag
)));
433 addJump(branchTest32(Zero
, regT0
), target
);
436 void JIT::emitSlow_op_jfalse(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
438 int cond
= currentInstruction
[1].u
.operand
;
439 unsigned target
= currentInstruction
[2].u
.operand
;
443 if (supportsFloatingPoint()) {
444 // regT1 contains the tag from the hot path.
445 Jump notNumber
= branch32(Above
, regT1
, TrustedImm32(JSValue::LowestTag
));
447 emitLoadDouble(cond
, fpRegT0
);
448 emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0
, fpRegT1
), target
);
449 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse
));
451 notNumber
.link(this);
454 callOperation(operationConvertJSValueToBoolean
, regT1
, regT0
);
455 emitJumpSlowToHot(branchTest32(Zero
, returnValueGPR
), target
); // Inverted.
458 void JIT::emit_op_jtrue(Instruction
* currentInstruction
)
460 int cond
= currentInstruction
[1].u
.operand
;
461 unsigned target
= currentInstruction
[2].u
.operand
;
463 emitLoad(cond
, regT1
, regT0
);
465 ASSERT((JSValue::BooleanTag
+ 1 == JSValue::Int32Tag
) && !(JSValue::Int32Tag
+ 1));
466 addSlowCase(branch32(Below
, regT1
, TrustedImm32(JSValue::BooleanTag
)));
467 addJump(branchTest32(NonZero
, regT0
), target
);
470 void JIT::emitSlow_op_jtrue(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
472 int cond
= currentInstruction
[1].u
.operand
;
473 unsigned target
= currentInstruction
[2].u
.operand
;
477 if (supportsFloatingPoint()) {
478 // regT1 contains the tag from the hot path.
479 Jump notNumber
= branch32(Above
, regT1
, TrustedImm32(JSValue::LowestTag
));
481 emitLoadDouble(cond
, fpRegT0
);
482 emitJumpSlowToHot(branchDoubleNonZero(fpRegT0
, fpRegT1
), target
);
483 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue
));
485 notNumber
.link(this);
488 callOperation(operationConvertJSValueToBoolean
, regT1
, regT0
);
489 emitJumpSlowToHot(branchTest32(NonZero
, returnValueGPR
), target
);
492 void JIT::emit_op_jeq_null(Instruction
* currentInstruction
)
494 int src
= currentInstruction
[1].u
.operand
;
495 unsigned target
= currentInstruction
[2].u
.operand
;
497 emitLoad(src
, regT1
, regT0
);
499 Jump isImmediate
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
501 Jump isNotMasqueradesAsUndefined
= branchTest8(Zero
, Address(regT0
, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined
));
502 loadPtr(Address(regT0
, JSCell::structureIDOffset()), regT2
);
503 move(TrustedImmPtr(m_codeBlock
->globalObject()), regT0
);
504 addJump(branchPtr(Equal
, Address(regT2
, Structure::globalObjectOffset()), regT0
), target
);
505 Jump masqueradesGlobalObjectIsForeign
= jump();
507 // Now handle the immediate cases - undefined & null
508 isImmediate
.link(this);
509 ASSERT((JSValue::UndefinedTag
+ 1 == JSValue::NullTag
) && (JSValue::NullTag
& 0x1));
510 or32(TrustedImm32(1), regT1
);
511 addJump(branch32(Equal
, regT1
, TrustedImm32(JSValue::NullTag
)), target
);
513 isNotMasqueradesAsUndefined
.link(this);
514 masqueradesGlobalObjectIsForeign
.link(this);
517 void JIT::emit_op_jneq_null(Instruction
* currentInstruction
)
519 int src
= currentInstruction
[1].u
.operand
;
520 unsigned target
= currentInstruction
[2].u
.operand
;
522 emitLoad(src
, regT1
, regT0
);
524 Jump isImmediate
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
526 addJump(branchTest8(Zero
, Address(regT0
, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined
)), target
);
527 loadPtr(Address(regT0
, JSCell::structureIDOffset()), regT2
);
528 move(TrustedImmPtr(m_codeBlock
->globalObject()), regT0
);
529 addJump(branchPtr(NotEqual
, Address(regT2
, Structure::globalObjectOffset()), regT0
), target
);
530 Jump wasNotImmediate
= jump();
532 // Now handle the immediate cases - undefined & null
533 isImmediate
.link(this);
535 ASSERT((JSValue::UndefinedTag
+ 1 == JSValue::NullTag
) && (JSValue::NullTag
& 0x1));
536 or32(TrustedImm32(1), regT1
);
537 addJump(branch32(NotEqual
, regT1
, TrustedImm32(JSValue::NullTag
)), target
);
539 wasNotImmediate
.link(this);
542 void JIT::emit_op_jneq_ptr(Instruction
* currentInstruction
)
544 int src
= currentInstruction
[1].u
.operand
;
545 Special::Pointer ptr
= currentInstruction
[2].u
.specialPointer
;
546 unsigned target
= currentInstruction
[3].u
.operand
;
548 emitLoad(src
, regT1
, regT0
);
549 addJump(branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
)), target
);
550 addJump(branchPtr(NotEqual
, regT0
, TrustedImmPtr(actualPointerFor(m_codeBlock
, ptr
))), target
);
553 void JIT::emit_op_eq(Instruction
* currentInstruction
)
555 int dst
= currentInstruction
[1].u
.operand
;
556 int src1
= currentInstruction
[2].u
.operand
;
557 int src2
= currentInstruction
[3].u
.operand
;
559 emitLoad2(src1
, regT1
, regT0
, src2
, regT3
, regT2
);
560 addSlowCase(branch32(NotEqual
, regT1
, regT3
));
561 addSlowCase(branch32(Equal
, regT1
, TrustedImm32(JSValue::CellTag
)));
562 addSlowCase(branch32(Below
, regT1
, TrustedImm32(JSValue::LowestTag
)));
564 compare32(Equal
, regT0
, regT2
, regT0
);
566 emitStoreBool(dst
, regT0
);
569 void JIT::emitSlow_op_eq(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
571 int dst
= currentInstruction
[1].u
.operand
;
572 int op1
= currentInstruction
[2].u
.operand
;
573 int op2
= currentInstruction
[3].u
.operand
;
575 JumpList storeResult
;
576 JumpList genericCase
;
578 genericCase
.append(getSlowCase(iter
)); // tags not equal
580 linkSlowCase(iter
); // tags equal and JSCell
581 genericCase
.append(branchPtr(NotEqual
, Address(regT0
, JSCell::structureIDOffset()), TrustedImmPtr(m_vm
->stringStructure
.get())));
582 genericCase
.append(branchPtr(NotEqual
, Address(regT2
, JSCell::structureIDOffset()), TrustedImmPtr(m_vm
->stringStructure
.get())));
585 callOperation(operationCompareStringEq
, regT0
, regT2
);
586 storeResult
.append(jump());
589 genericCase
.append(getSlowCase(iter
)); // doubles
590 genericCase
.link(this);
591 emitLoad(op1
, regT1
, regT0
);
592 emitLoad(op2
, regT3
, regT2
);
593 callOperation(operationCompareEq
, regT1
, regT0
, regT3
, regT2
);
595 storeResult
.link(this);
596 emitStoreBool(dst
, returnValueGPR
);
599 void JIT::emit_op_neq(Instruction
* currentInstruction
)
601 int dst
= currentInstruction
[1].u
.operand
;
602 int src1
= currentInstruction
[2].u
.operand
;
603 int src2
= currentInstruction
[3].u
.operand
;
605 emitLoad2(src1
, regT1
, regT0
, src2
, regT3
, regT2
);
606 addSlowCase(branch32(NotEqual
, regT1
, regT3
));
607 addSlowCase(branch32(Equal
, regT1
, TrustedImm32(JSValue::CellTag
)));
608 addSlowCase(branch32(Below
, regT1
, TrustedImm32(JSValue::LowestTag
)));
610 compare32(NotEqual
, regT0
, regT2
, regT0
);
612 emitStoreBool(dst
, regT0
);
615 void JIT::emitSlow_op_neq(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
617 int dst
= currentInstruction
[1].u
.operand
;
619 JumpList storeResult
;
620 JumpList genericCase
;
622 genericCase
.append(getSlowCase(iter
)); // tags not equal
624 linkSlowCase(iter
); // tags equal and JSCell
625 genericCase
.append(branchPtr(NotEqual
, Address(regT0
, JSCell::structureIDOffset()), TrustedImmPtr(m_vm
->stringStructure
.get())));
626 genericCase
.append(branchPtr(NotEqual
, Address(regT2
, JSCell::structureIDOffset()), TrustedImmPtr(m_vm
->stringStructure
.get())));
629 callOperation(operationCompareStringEq
, regT0
, regT2
);
630 storeResult
.append(jump());
633 genericCase
.append(getSlowCase(iter
)); // doubles
634 genericCase
.link(this);
635 callOperation(operationCompareEq
, regT1
, regT0
, regT3
, regT2
);
637 storeResult
.link(this);
638 xor32(TrustedImm32(0x1), returnValueGPR
);
639 emitStoreBool(dst
, returnValueGPR
);
642 void JIT::compileOpStrictEq(Instruction
* currentInstruction
, CompileOpStrictEqType type
)
644 int dst
= currentInstruction
[1].u
.operand
;
645 int src1
= currentInstruction
[2].u
.operand
;
646 int src2
= currentInstruction
[3].u
.operand
;
648 emitLoad2(src1
, regT1
, regT0
, src2
, regT3
, regT2
);
650 // Bail if the tags differ, or are double.
651 addSlowCase(branch32(NotEqual
, regT1
, regT3
));
652 addSlowCase(branch32(Below
, regT1
, TrustedImm32(JSValue::LowestTag
)));
654 // Jump to a slow case if both are strings.
655 Jump notCell
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
656 Jump firstNotString
= branchPtr(NotEqual
, Address(regT0
, JSCell::structureIDOffset()), TrustedImmPtr(m_vm
->stringStructure
.get()));
657 addSlowCase(branchPtr(Equal
, Address(regT2
, JSCell::structureIDOffset()), TrustedImmPtr(m_vm
->stringStructure
.get())));
659 firstNotString
.link(this);
661 // Simply compare the payloads.
662 if (type
== OpStrictEq
)
663 compare32(Equal
, regT0
, regT2
, regT0
);
665 compare32(NotEqual
, regT0
, regT2
, regT0
);
667 emitStoreBool(dst
, regT0
);
670 void JIT::emit_op_stricteq(Instruction
* currentInstruction
)
672 compileOpStrictEq(currentInstruction
, OpStrictEq
);
675 void JIT::emitSlow_op_stricteq(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
681 JITSlowPathCall
slowPathCall(this, currentInstruction
, slow_path_stricteq
);
685 void JIT::emit_op_nstricteq(Instruction
* currentInstruction
)
687 compileOpStrictEq(currentInstruction
, OpNStrictEq
);
690 void JIT::emitSlow_op_nstricteq(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
696 JITSlowPathCall
slowPathCall(this, currentInstruction
, slow_path_nstricteq
);
700 void JIT::emit_op_eq_null(Instruction
* currentInstruction
)
702 int dst
= currentInstruction
[1].u
.operand
;
703 int src
= currentInstruction
[2].u
.operand
;
705 emitLoad(src
, regT1
, regT0
);
706 Jump isImmediate
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
708 Jump isMasqueradesAsUndefined
= branchTest8(NonZero
, Address(regT0
, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined
));
709 move(TrustedImm32(0), regT1
);
710 Jump wasNotMasqueradesAsUndefined
= jump();
712 isMasqueradesAsUndefined
.link(this);
713 loadPtr(Address(regT0
, JSCell::structureIDOffset()), regT2
);
714 move(TrustedImmPtr(m_codeBlock
->globalObject()), regT0
);
715 loadPtr(Address(regT2
, Structure::globalObjectOffset()), regT2
);
716 compare32(Equal
, regT0
, regT2
, regT1
);
717 Jump wasNotImmediate
= jump();
719 isImmediate
.link(this);
721 compare32(Equal
, regT1
, TrustedImm32(JSValue::NullTag
), regT2
);
722 compare32(Equal
, regT1
, TrustedImm32(JSValue::UndefinedTag
), regT1
);
725 wasNotImmediate
.link(this);
726 wasNotMasqueradesAsUndefined
.link(this);
728 emitStoreBool(dst
, regT1
);
731 void JIT::emit_op_neq_null(Instruction
* currentInstruction
)
733 int dst
= currentInstruction
[1].u
.operand
;
734 int src
= currentInstruction
[2].u
.operand
;
736 emitLoad(src
, regT1
, regT0
);
737 Jump isImmediate
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
739 Jump isMasqueradesAsUndefined
= branchTest8(NonZero
, Address(regT0
, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined
));
740 move(TrustedImm32(1), regT1
);
741 Jump wasNotMasqueradesAsUndefined
= jump();
743 isMasqueradesAsUndefined
.link(this);
744 loadPtr(Address(regT0
, JSCell::structureIDOffset()), regT2
);
745 move(TrustedImmPtr(m_codeBlock
->globalObject()), regT0
);
746 loadPtr(Address(regT2
, Structure::globalObjectOffset()), regT2
);
747 compare32(NotEqual
, regT0
, regT2
, regT1
);
748 Jump wasNotImmediate
= jump();
750 isImmediate
.link(this);
752 compare32(NotEqual
, regT1
, TrustedImm32(JSValue::NullTag
), regT2
);
753 compare32(NotEqual
, regT1
, TrustedImm32(JSValue::UndefinedTag
), regT1
);
756 wasNotImmediate
.link(this);
757 wasNotMasqueradesAsUndefined
.link(this);
759 emitStoreBool(dst
, regT1
);
762 void JIT::emit_op_throw(Instruction
* currentInstruction
)
764 ASSERT(regT0
== returnValueGPR
);
765 emitLoad(currentInstruction
[1].u
.operand
, regT1
, regT0
);
766 callOperationNoExceptionCheck(operationThrow
, regT1
, regT0
);
767 jumpToExceptionHandler();
770 void JIT::emit_op_get_pnames(Instruction
* currentInstruction
)
772 int dst
= currentInstruction
[1].u
.operand
;
773 int base
= currentInstruction
[2].u
.operand
;
774 int i
= currentInstruction
[3].u
.operand
;
775 int size
= currentInstruction
[4].u
.operand
;
776 int breakTarget
= currentInstruction
[5].u
.operand
;
778 JumpList isNotObject
;
780 emitLoad(base
, regT1
, regT0
);
781 if (!m_codeBlock
->isKnownNotImmediate(base
))
782 isNotObject
.append(branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
)));
783 if (VirtualRegister(base
) != m_codeBlock
->thisRegister() || m_codeBlock
->isStrictMode())
784 isNotObject
.append(emitJumpIfCellNotObject(regT0
));
786 // We could inline the case where you have a valid cache, but
787 // this call doesn't seem to be hot.
788 Label
isObject(this);
789 callOperation(operationGetPNames
, regT0
);
790 emitStoreCell(dst
, returnValueGPR
);
791 load32(Address(regT0
, OBJECT_OFFSETOF(JSPropertyNameIterator
, m_jsStringsSize
)), regT3
);
792 store32(TrustedImm32(Int32Tag
), intTagFor(i
));
793 store32(TrustedImm32(0), intPayloadFor(i
));
794 store32(TrustedImm32(Int32Tag
), intTagFor(size
));
795 store32(regT3
, payloadFor(size
));
798 isNotObject
.link(this);
799 addJump(branch32(Equal
, regT1
, TrustedImm32(JSValue::NullTag
)), breakTarget
);
800 addJump(branch32(Equal
, regT1
, TrustedImm32(JSValue::UndefinedTag
)), breakTarget
);
801 callOperation(operationToObject
, base
, regT1
, regT0
);
802 jump().linkTo(isObject
, this);
807 void JIT::emit_op_next_pname(Instruction
* currentInstruction
)
809 int dst
= currentInstruction
[1].u
.operand
;
810 int base
= currentInstruction
[2].u
.operand
;
811 int i
= currentInstruction
[3].u
.operand
;
812 int size
= currentInstruction
[4].u
.operand
;
813 int it
= currentInstruction
[5].u
.operand
;
814 int target
= currentInstruction
[6].u
.operand
;
816 JumpList callHasProperty
;
819 load32(intPayloadFor(i
), regT0
);
820 Jump end
= branch32(Equal
, regT0
, intPayloadFor(size
));
823 loadPtr(payloadFor(it
), regT1
);
824 loadPtr(Address(regT1
, OBJECT_OFFSETOF(JSPropertyNameIterator
, m_jsStrings
)), regT2
);
825 load32(BaseIndex(regT2
, regT0
, TimesEight
), regT2
);
826 store32(TrustedImm32(JSValue::CellTag
), tagFor(dst
));
827 store32(regT2
, payloadFor(dst
));
830 add32(TrustedImm32(1), regT0
);
831 store32(regT0
, intPayloadFor(i
));
833 // Verify that i is valid:
834 loadPtr(payloadFor(base
), regT0
);
836 // Test base's structure
837 loadPtr(Address(regT0
, JSCell::structureIDOffset()), regT2
);
838 callHasProperty
.append(branchPtr(NotEqual
, regT2
, Address(Address(regT1
, OBJECT_OFFSETOF(JSPropertyNameIterator
, m_cachedStructure
)))));
840 // Test base's prototype chain
841 loadPtr(Address(Address(regT1
, OBJECT_OFFSETOF(JSPropertyNameIterator
, m_cachedPrototypeChain
))), regT3
);
842 loadPtr(Address(regT3
, OBJECT_OFFSETOF(StructureChain
, m_vector
)), regT3
);
843 addJump(branchTestPtr(Zero
, Address(regT3
)), target
);
845 Label
checkPrototype(this);
846 callHasProperty
.append(branch32(Equal
, Address(regT2
, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue
, u
.asBits
.tag
)), TrustedImm32(JSValue::NullTag
)));
847 loadPtr(Address(regT2
, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue
, u
.asBits
.payload
)), regT2
);
848 loadPtr(Address(regT2
, JSCell::structureIDOffset()), regT2
);
849 callHasProperty
.append(branchPtr(NotEqual
, regT2
, Address(regT3
)));
850 addPtr(TrustedImm32(sizeof(Structure
*)), regT3
);
851 branchTestPtr(NonZero
, Address(regT3
)).linkTo(checkPrototype
, this);
854 addJump(jump(), target
);
856 // Slow case: Ask the object if i is valid.
857 callHasProperty
.link(this);
858 loadPtr(addressFor(dst
), regT1
);
859 callOperation(operationHasProperty
, regT0
, regT1
);
861 // Test for valid key.
862 addJump(branchTest32(NonZero
, regT0
), target
);
863 jump().linkTo(begin
, this);
869 void JIT::emit_op_push_with_scope(Instruction
* currentInstruction
)
871 emitLoad(currentInstruction
[1].u
.operand
, regT1
, regT0
);
872 callOperation(operationPushWithScope
, regT1
, regT0
);
875 void JIT::emit_op_pop_scope(Instruction
*)
877 callOperation(operationPopScope
);
880 void JIT::emit_op_to_number(Instruction
* currentInstruction
)
882 int dst
= currentInstruction
[1].u
.operand
;
883 int src
= currentInstruction
[2].u
.operand
;
885 emitLoad(src
, regT1
, regT0
);
887 Jump isInt32
= branch32(Equal
, regT1
, TrustedImm32(JSValue::Int32Tag
));
888 addSlowCase(branch32(AboveOrEqual
, regT1
, TrustedImm32(JSValue::LowestTag
)));
892 emitStore(dst
, regT1
, regT0
);
895 void JIT::emitSlow_op_to_number(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
899 JITSlowPathCall
slowPathCall(this, currentInstruction
, slow_path_to_number
);
903 void JIT::emit_op_push_name_scope(Instruction
* currentInstruction
)
905 emitLoad(currentInstruction
[2].u
.operand
, regT1
, regT0
);
906 callOperation(operationPushNameScope
, &m_codeBlock
->identifier(currentInstruction
[1].u
.operand
), regT1
, regT0
, currentInstruction
[3].u
.operand
);
// op_catch: entry point of an exception handler. The VM (held in regT3)
// supplies both the call frame to resume in and the thrown exception value.
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    move(TrustedImmPtr(m_vm), regT3);
    // operationThrow returns the callFrame for the handler.
    load32(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);

    // Re-derive the stack pointer for this code block's frame.
    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    // Now store the exception returned by operationThrow.
    load32(Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    // Clear the VM's exception slot by writing back the empty JSValue.
    store32(TrustedImm32(JSValue().payload()), Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    // Operand 1 receives the caught exception value.
    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
}
// op_switch_imm: switch dispatch through a SimpleJumpTable. The scrutinee's
// type is not known statically, so we always call the runtime, which returns
// the machine-code address to jump to.
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT1, regT0, tableIndex);
    // The operation's return value is the jump target.
    jump(returnValueGPR);
}
// op_switch_char: like op_switch_imm, but records the record as a Character
// switch and dispatches through the character runtime helper.
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT1, regT0, tableIndex);
    // The operation's return value is the jump target.
    jump(returnValueGPR);
}
// op_switch_string: switch dispatch through a StringJumpTable; unlike the
// imm/char variants there is no CTI table to pre-build here.
void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT1, regT0, tableIndex);
    // The operation's return value is the jump target.
    jump(returnValueGPR);
}
// op_throw_static_error: loads a constant (the error message value) from the
// code block and calls the runtime to throw it.
void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    // Operand 1 indexes a constant in the code block.
    emitLoad(m_codeBlock->getConstant(currentInstruction[1].u.operand), regT1, regT0);
    // Operand 2 is forwarded to the runtime as an immediate.
    callOperation(operationThrowStaticError, regT1, regT0, currentInstruction[2].u.operand);
}
// op_debug: calls the runtime debug hook only when the code block's debugger
// request counter is non-zero; otherwise falls through doing no work.
void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}
// op_enter: function entry. Runs the optimization entry check, zaps all
// locals to undefined, then performs the slow-path enter hook.
void JIT::emit_op_enter(Instruction* currentInstruction)
{
    emitEnterOptimizationCheck();

    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(virtualRegisterForLocal(i).offset(), jsUndefined());

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_enter);
    slowPathCall.call();
}
// op_create_activation: lazily creates the activation object. If the slot
// already holds a value (tag != EmptyValueTag), creation is skipped.
void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;

    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    callOperation(operationCreateActivation, 0);
    emitStoreCell(activation, returnValueGPR);
    activationCreated.link(this);
}
// op_create_arguments: lazily creates the arguments object. The new cell is
// stored both in dst and in the unmodified-arguments shadow register so later
// ops can detect tear-off.
void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    // Skip creation if dst already holds a value (tag != EmptyValueTag).
    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
    callOperation(operationCreateArguments);
    emitStoreCell(dst, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(dst)).offset(), returnValueGPR);
    argsCreated.link(this);
}
// op_init_lazy_reg: seeds a lazily-materialized register with the empty
// JSValue so later ops can test "not yet created" via EmptyValueTag.
void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    emitStore(dst, JSValue());
}
// op_get_callee: loads the callee from the call frame header and compares it
// against the function cell cached in the instruction stream; a mismatch
// takes the slow path.
void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int result = currentInstruction[1].u.operand;
    // NOTE: '&currentInstruction' reconstructed from extraction mojibake.
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[2].u.jsCell;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);

    loadPtr(cachedFunction, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));

    // Store the callee cell with an explicit CellTag.
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitStore(result, regT1, regT0);
}
1044 void JIT::emitSlow_op_get_callee(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
1048 JITSlowPathCall
slowPathCall(this, currentInstruction
, slow_path_get_callee
);
1049 slowPathCall
.call();
// op_create_this: inline-allocates the |this| object for a constructor call
// using the callee's allocation profile; a null allocator (no profile yet)
// takes the slow path, as does inline-allocation failure.
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    // resultReg aliases calleeReg: the callee is dead after the loads below.
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitLoadPayload(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}
// Slow path for op_create_this: either the callee had no allocation profile
// or the inline allocation failed; defer to the common slow path.
void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}
// op_to_this: fast path verifies that |this| is a cell, is a final object,
// and has the structure cached in the instruction stream; any of the three
// checks failing takes the slow path.
void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    // NOTE: '&currentInstruction' reconstructed from extraction mojibake.
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    int thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT0);
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));
}
1093 void JIT::emitSlow_op_to_this(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
1098 JITSlowPathCall
slowPathCall(this, currentInstruction
, slow_path_to_this
);
1099 slowPathCall
.call();
// op_profile_will_call: notifies the profiler of an imminent call, but only
// when a profiler is enabled (non-null enabled-profiler slot).
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0);
    // Operand 1 is the callee value handed to the profiler.
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileWillCall, regT1, regT0);
    profilerDone.link(this);
}
// op_profile_did_call: mirror of op_profile_will_call, run after the call
// returns; skipped entirely when no profiler is enabled.
void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0);
    // Operand 1 is the callee value handed to the profiler.
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileDidCall, regT1, regT0);
    profilerDone.link(this);
}
// op_get_arguments_length: fast path applies only while the arguments object
// has not been materialized (tag still EmptyValueTag); reads ArgumentCount
// straight from the call frame header.
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    load32(payloadFor(JSStack::ArgumentCount), regT0);
    // ArgumentCount includes |this| (see op_get_argument_by_val), so drop one.
    sub32(TrustedImm32(1), regT0);
    emitStoreInt32(dst, regT0);
}
1130 void JIT::emitSlow_op_get_arguments_length(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
1133 int dst
= currentInstruction
[1].u
.operand
;
1134 int base
= currentInstruction
[2].u
.operand
;
1135 callOperation(operationGetArgumentsLength
, dst
, base
);
// op_get_argument_by_val: reads an argument directly out of the call frame.
// Fast path requires (a) no materialized arguments object, (b) an int32
// index, and (c) index + 1 < ArgumentCount; otherwise slow path.
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including this
    load32(payloadFor(JSStack::ArgumentCount), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

    // Load payload then tag of frame slot thisArgumentOffset + regT2.
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT1);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}
1157 void JIT::emitSlow_op_get_argument_by_val(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
1159 int dst
= currentInstruction
[1].u
.operand
;
1160 int arguments
= currentInstruction
[2].u
.operand
;
1161 int property
= currentInstruction
[3].u
.operand
;
1164 Jump skipArgumentsCreation
= jump();
1169 callOperation(operationCreateArguments
);
1170 emitStoreCell(arguments
, returnValueGPR
);
1171 emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(arguments
)).offset(), returnValueGPR
);
1173 skipArgumentsCreation
.link(this);
1174 emitLoad(arguments
, regT1
, regT0
);
1175 emitLoad(property
, regT3
, regT2
);
1176 callOperation(WithProfile
, operationGetByValGeneric
, dst
, regT1
, regT0
, regT3
, regT2
);
1181 #endif // USE(JSVALUE32_64)
1182 #endif // ENABLE(JIT)