/*
 * Copyright (C) 2009, 2012-2015 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "BasicBlockLocation.h"
#include "CopiedSpaceInlines.h"
#include "Exception.h"
#include "JITInlines.h"
#include "JSFunction.h"
#include "JSNameScope.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "RepatchBuffer.h"
#include "SlowPathCall.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"

namespace JSC {

#if USE(JSVALUE64)
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitFunctionEpilogue();
    ret();
}
void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
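// A sketch of what the fast path above relies on (stated as an assumption,
// not the authoritative contract): emitAllocateJSObject pops the head of the
// MarkedAllocator's free list - bailing to the slow case when the list is
// empty - and stores the Structure into the freshly allocated cell. The slow
// case below falls back to operationNewObject instead.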
void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}
void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(regT2, regT2, regT3);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}
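// The loop above is, in effect, the following sketch (the genuinely tricky
// cases - non-cell operands, non-object prototypes - bail to the slow path):
//
//     JSObject* o = asObject(value);
//     for (;;) {
//         JSValue p = o->structure()->prototype();
//         if (p == proto)
//             return true;  // isInstance
//         if (!p.isCell())
//             return false; // fell off the end of the prototype chain
//         o = asObject(p);
//     }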
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitFunctionEpilogue();
    ret();
}
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}
void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}
void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
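// A concrete trace of the xor trick, assuming this era's JSVALUE64 boolean
// encodings (ValueFalse == 0x06, ValueTrue == 0x07):
//
//     false: 0x06 ^ ValueFalse == 0x00; 0x00 ^ ValueTrue == 0x07 == true
//     true:  0x07 ^ ValueFalse == 0x01; 0x01 ^ ValueTrue == 0x06 == false
//
// Any non-boolean input leaves bits other than the low bit set after the
// first xor, so the branchTestPtr against ~1 sends it to the slow case.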
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler();
}
void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    callOperation(operationPushWithScope, dst, regT0);
}
void JIT::emit_op_pop_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;

    callOperation(operationPopScope, scope);
}
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}
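// Strings are why the both-cells case above must bail: two distinct JSString
// cells can be strict-equal by content, so the raw compare64 is only valid
// once we know at most one operand is a cell.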
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    if (currentInstruction[4].u.operand == JSNameScope::CatchScope) {
        callOperation(operationPushCatchScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT0);
        return;
    }

    RELEASE_ASSERT(currentInstruction[4].u.operand == JSNameScope::FunctionNameScope);
    callOperation(operationPushFunctionNameScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT0);
}
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // Gotta restore the tag registers. We could be throwing from FTL, which may
    // clobber them.
    move(TrustedImm64(TagTypeNumber), tagTypeNumberRegister);
    move(TrustedImm64(TagMask), tagMaskRegister);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);
    load64(Address(regT3, VM::vmEntryFrameForThrowOffset()), regT0);
    store64(regT0, Address(regT3, VM::topVMEntryFrameOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);
}
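// For orientation, the tag constants restored above (values as defined in
// this era's JSValue.h, stated here as an assumption): TagTypeNumber is
// 0xffff000000000000, so any value with one of those bits set is a number
// immediate, and TagMask adds TagBitTypeOther so that (value & TagMask) == 0
// exactly when the value is a cell pointer.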
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}
void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}
void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    move(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))), regT0);
    callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);
}
void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock->ownerExecutable());

    emitEnterOptimizationCheck();
}
void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;

    emitGetVirtualRegister(scope, regT0);
    callOperation(operationCreateActivation, regT0);
    emitStoreCell(dst, returnValueGPR);
    emitStoreCell(scope, returnValueGPR);
}
void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}
void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have rare data
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed
    linkSlowCase(iter); // cached function didn't match

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}
void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    addSlowCase(branchTest64(Zero, regT0));
}
void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileWillCall, regT0);
    profilerDone.link(this);
}
void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileDidCall, regT0);
    profilerDone.link(this);
}
void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}
void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), currentInstruction[2].u.operand); // inverted!
}
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), currentInstruction[2].u.operand);
}
void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
    slowPathCall.call();
}
void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
    slowPathCall.call();
}
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}
void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}
void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(baseVal, regT1);
    callOperation(operationCheckHasInstance, dst, regT0, regT1);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}
void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}
void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

#endif // USE(JSVALUE64)
void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }

    // Emit the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog->timerDidFireAddress())));
}
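// A sketch of the tier-up scheme assumed above: the execute counter starts
// out negative and branchAdd32(PositiveOrZero, ...) adds the per-loop
// increment, so the slow case fires once enough iterations push the counter
// to zero or above, at which point the slow path below consults
// operationOptimize for an optimized entry point.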
void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, regT0, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled()) {
        linkSlowCase(iter);
        callOperation(operationHandleWatchdogTimer);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}
void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}
void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);
    callOperation(operationNewFunction, dst, regT0, funcExec);
}
void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    Jump notUndefinedScope;
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
    emitStore(dst, jsUndefined());
#endif
    Jump done = jump();
    notUndefinedScope.link(this);

    FunctionExecutable* funcExpr = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    callOperation(operationNewFunction, dst, regT0, funcExpr);
    done.link(this);
}
void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}
void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}
void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}
#if USE(JSVALUE64)
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}
void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(operationHasIndexedPropertyGeneric));
}
void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
    // number was signed since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64-bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extending since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}
void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, profile);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}
void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitGetVirtualRegister(index, regT1);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}
void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}
void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);
    Jump done = jump();

    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);
    Jump done = jump();

    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
        jumpToEnd.append(emitJumpIfImmediateInteger(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfImmediateNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}
#endif // USE(JSVALUE64)
void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
    slowPathCall.call();
}
void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
    slowPathCall.call();
}
void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
    slowPathCall.call();
}
void JIT::emit_op_get_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_property_enumerator);
    slowPathCall.call();
}
void JIT::emit_op_to_index_string(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
    slowPathCall.call();
}
void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
    if (!basicBlockLocation->hasExecuted())
        basicBlockLocation->emitExecuteCode(*this, regT1);
}
void JIT::emit_op_create_direct_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_direct_arguments);
    slowPathCall.call();
}
void JIT::emit_op_create_scoped_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_scoped_arguments);
    slowPathCall.call();
}
void JIT::emit_op_create_out_of_band_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_out_of_band_arguments);
    slowPathCall.call();
}

} // namespace JSC

#endif // ENABLE(JIT)