/*
 * Copyright (C) 2009, 2012, 2013, 2014 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CCallHelpers.h"
#include "Exception.h"
#include "JITInlines.h"
#include "JSEnvironmentRecord.h"
#include "JSFunction.h"
#include "JSNameScope.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "RepatchBuffer.h"
#include "SlowPathCall.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"

namespace JSC {
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction func)
{
    Call nativeCall;

    emitFunctionPrologue();
    emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock);
    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(8), stackPointerRegister); // Align stack for call.
    storePtr(X86Registers::ecx, Address(stackPointerRegister));

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(8), stackPointerRegister);

#elif CPU(ARM) || CPU(SH4) || CPU(MIPS)
#if CPU(MIPS)
    // Allocate stack space for (unused) 16 bytes (8-byte aligned) for 4 arguments.
    subPtr(TrustedImm32(16), stackPointerRegister);
#endif

    // Calling convention is f(argumentGPR0, argumentGPR1, ...).
    // Host function signature is f(ExecState*).
    move(callFrameRegister, argumentGPR0);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, argumentGPR1);
    loadPtr(Address(argumentGPR1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

#if CPU(MIPS)
    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);
#endif

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    abortWithReason(JITNotSupported);
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(vm->addressOfException()), TrustedImm32(0));

    emitFunctionEpilogue();
    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    addPtr(TrustedImm32(-4), stackPointerRegister);
    loadPtr(Address(callFrameRegister), X86Registers::ecx);
    push(X86Registers::ecx);
#else
    loadPtr(Address(callFrameRegister), argumentGPR0);
#endif
    move(TrustedImmPtr(FunctionPtr(operationVMHandleException).value()), regT3);
    call(regT3);

#if CPU(X86)
    addPtr(TrustedImm32(8), stackPointerRegister);
#endif

    jumpToExceptionHandler();

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_vm, *this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return FINALIZE_CODE(patchBuffer, ("JIT CTI native call"));
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueGPR != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

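// A note on the prototype-chain walk below: only the payload word of a cell
// JSValue holds the pointer (the tag word is just JSValue::CellTag), so the
// loop loads only the payload half of Structure::prototypeOffset() and tests
// it against zero; a null prototype payload ends the chain.
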
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);

    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoad(baseVal, regT3, regT2);
    callOperation(operationCheckHasInstance, dst, regT1, regT0, regT3, regT2);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoad(proto, regT3, regT2);
    callOperation(operationInstanceOf, dst, regT1, regT0, regT3, regT2);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
    emitStoreBool(dst, regT0);
}

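// The tag encoding places Int32Tag at 0xffffffff and gives double values tags
// strictly below LowestTag. The add32(1) below wraps Int32Tag around to zero,
// so a single unsigned Below comparison against LowestTag + 1 accepts both
// int32 and double tags and rejects every other tag.
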
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

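// jfalse/jtrue fast path: BooleanTag + 1 == Int32Tag and Int32Tag + 1 == 0,
// which the ASSERT below documents. An unsigned Below-BooleanTag check on the
// tag therefore routes every tag other than boolean and int32 to the slow
// case, and for both booleans and int32s the payload is zero exactly when the
// value is falsy, so a single branchTest32 on the payload decides the jump.
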
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}

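// jeq_null/jneq_null rely on UndefinedTag + 1 == NullTag with NullTag odd
// (asserted below): or32(1) into the tag maps UndefinedTag onto NullTag while
// leaving NullTag unchanged, so undefined and null collapse into one compare
// against NullTag.
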
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

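// The eq/neq slow paths below share a shape mirroring the three slow cases
// added on the hot path: the first (tags not equal) goes straight to the
// generic comparison; the second (both cells) first tries a string-equality
// fast path when both payloads have the VM's string structure; the third
// (double tags) also falls through to the generic operationCompareEq call.
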
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    emitLoad(op1, regT1, regT0);
    emitLoad(op2, regT3, regT2);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    emitStoreBool(dst, returnValueGPR);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), returnValueGPR);
    emitStoreBool(dst, returnValueGPR);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings or symbols (non object).
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstIsObject = emitJumpIfCellObject(regT0);
    addSlowCase(emitJumpIfCellNotObject(regT2));
    notCell.link(this);
    firstIsObject.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

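// compileOpStrictEq can compare payloads directly because by that point the
// tags are known equal and non-double, and the only cells reaching the
// payload compare are objects, which compare by identity; strings and symbols
// (which need deep equality) were routed to the slow case above.
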
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1); // null or undefined both count as equal to null.

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1); // Not equal to null only if neither null nor undefined.

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperationNoExceptionCheck(operationThrow, regT1, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
    callOperation(operationPushWithScope, dst, regT1, regT0);
}

void JIT::emit_op_pop_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;
    callOperation(operationPopScope, scope);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    if (src != dst)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
    if (currentInstruction[4].u.operand == JSNameScope::CatchScope) {
        callOperation(operationPushCatchScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT1, regT0);
        return;
    }

    RELEASE_ASSERT(currentInstruction[4].u.operand == JSNameScope::FunctionNameScope);
    callOperation(operationPushFunctionNameScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT1, regT0);
}

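// op_catch runs at the start of a handler after unwinding, so it cannot trust
// any register state: it reloads the handler's call frame and re-derives the
// stack pointer from the code block's frame size before materializing the
// Exception object and the thrown value into their locals.
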
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    move(TrustedImmPtr(m_vm), regT3);
    // operationThrow returns the callFrame for the handler.
    load32(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);
    load32(Address(regT3, VM::vmEntryFrameForThrowOffset()), regT0);
    store32(regT0, Address(regT3, VM::topVMEntryFrameOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    // Now store the exception returned by operationThrow.
    load32(Address(regT3, VM::exceptionOffset()), regT2);
    move(TrustedImm32(JSValue::CellTag), regT1);

    store32(TrustedImm32(0), Address(regT3, VM::exceptionOffset()));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT2);

    load32(Address(regT2, Exception::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT2, Exception::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);

    unsigned thrownValue = currentInstruction[2].u.operand;
    emitStore(thrownValue, regT1, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    emitLoad(m_codeBlock->getConstant(currentInstruction[1].u.operand), regT1, regT0);
    callOperation(operationThrowStaticError, regT1, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_enter(Instruction* currentInstruction)
{
    emitEnterOptimizationCheck();

    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(virtualRegisterForLocal(i).offset(), jsUndefined());

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_enter);
    slowPathCall.call();
}

void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    int lexicalEnvironment = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;

    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    callOperation(operationCreateActivation, regT0);
    emitStoreCell(lexicalEnvironment, returnValueGPR);
    emitStoreCell(scope, returnValueGPR);
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

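// create_this keeps a per-call-site cached callee: if the cache holds the
// sentinel JSCell::seenMultipleCalleeObjects(), the callee check is skipped
// and the shared allocation profile is used; if it holds a specific function
// that doesn't match the current callee, we take the slow path.
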
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitLoadPayload(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have rare data
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed
    linkSlowCase(iter); // cached function didn't match

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    int thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT0);
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));
}

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitLoadTag(currentInstruction[1].u.operand, regT0);
    addSlowCase(branch32(Equal, regT0, TrustedImm32(JSValue::EmptyValueTag)));
}

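// check_tdz guards the temporal dead zone for let/const bindings: an
// uninitialized binding is stored with EmptyValueTag, so one tag compare is
// enough to decide whether the slow path should throw the TDZ error.
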
void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileWillCall, regT1, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileDidCall, regT1, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(enumerator, regT1);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm32(1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm32(1), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
    // number was signed since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64-bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extending since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);
    move(TrustedImm32(1), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitStoreBool(dst, regT0);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitLoad(base, regT1, regT0);
    emitLoad(property, regT3, regT2);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT1, regT0, regT3, regT2, profile);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

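// get_direct_pname distinguishes inline and out-of-line property storage.
// Inline slots sit directly inside the object at offsetOfInlineStorage(),
// indexed by the property index. Out-of-line slots live in the butterfly at
// negative offsets, so the index (biased by the inline capacity) is negated
// before being scaled into the butterfly, starting at firstOutOfLineOffset.
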
void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    // Check the structure
    emitLoadPayload(enumerator, regT1);
    load32(Address(regT0, JSCell::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitLoadPayload(index, regT2);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);

    Jump done = jump();

    // Otherwise it's out of line
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT2);
    neg32(regT2);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitLoadPayload(index, regT0);
    emitLoadPayload(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm32(JSValue::NullTag), regT2);
    move(TrustedImm32(0), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
    move(TrustedImm32(JSValue::CellTag), regT2);

    done.link(this);
    emitStore(dst, regT2, regT0);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitLoadPayload(index, regT0);
    emitLoadPayload(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm32(JSValue::NullTag), regT2);
    move(TrustedImm32(0), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
    move(TrustedImm32(JSValue::CellTag), regT2);

    done.link(this);
    emitStore(dst, regT2, regT0);
}

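// profile_type appends (value, structureID, location) records to the VM's
// type profiler log, a bump-allocated buffer: once the next-entry pointer
// reaches logEndPtr(), the log is flushed via operationProcessTypeProfilerLog
// before execution continues.
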
void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    // Load payload in T0. Load tag in T3.
    emitLoadPayload(valueToProfile, regT0);
    emitLoadTag(valueToProfile, regT3);

    JumpList jumpToEnd;

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 32-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::UndefinedTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::NullTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::BooleanTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::Int32Tag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber) {
        jumpToEnd.append(branch32(Below, regT3, TrustedImm32(JSValue::LowestTag)));
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::Int32Tag)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag));
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);

    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(regT3, Address(regT1, TypeProfilerLog::LogEntry::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    // Store the structureID of the cell if argument is a cell, otherwise, store 0 on the log entry.
    Jump notCell = branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag));
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipNotCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipNotCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store32(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    jumpToEnd.append(branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr())));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);

    jumpToEnd.link(this);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)