/*
 * Copyright (C) 2009, 2012, 2013 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"

#if ENABLE(JIT)

#include "JIT.h"

#include "Arguments.h"
#include "CopiedSpaceInlines.h"
#include "JITInlines.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "VirtualRegister.h"

namespace JSC {

#if USE(JSVALUE64)

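// Quick reference for the JSVALUE64 value encoding relied on throughout
// this file (a sketch, assuming the standard encoding of this era; the
// authoritative definitions live in JSCJSValue.h):
//
//   TagTypeNumber   = 0xffff000000000000  // any of the top 16 bits set => number
//   TagBitTypeOther = 0x2, TagBitBool = 0x4, TagBitUndefined = 0x8
//   ValueFalse      = 0x6, ValueTrue = 0x7
//   ValueUndefined  = 0xa, ValueNull = 0x2
//   TagMask         = TagTypeNumber | TagBitTypeOther
//
// Int32s are boxed as TagTypeNumber | uint32; doubles are stored with a
// 2^48 offset added, so every number sets at least one high tag bit, and
// cells (pointers) satisfy (value & TagMask) == 0.
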
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_captured_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitNotifyWrite(regT0, regT1, currentInstruction[3].u.watchpointSet);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(regT2, regT2, regT3);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

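// The emitted loop above is, roughly, the following (a paraphrase, not
// real code from this file):
//
//   JSValue o = value;
//   do {
//       o = o.structure()->prototype();  // walk one link up the chain
//       if (o == proto) return true;
//   } while (o.isCell());
//   return false;                        // fell off the chain at null
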
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

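// Worked example of the xor/test trick above, assuming the encoding noted
// at the top of this file: false is 0x6 and true is 0x7, so after
// xor64(ValueFalse) a boolean becomes 0 or 1, and (v & ~1) == 0 holds for
// exactly those two inputs. Any other JSValue leaves higher bits set and
// fails the test64, yielding false.
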
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

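// The test64 above relies on the NaN-boxing invariant: every number
// (int32 or offset double) sets at least one of the top 16 tag bits,
// while cells and the other immediates (booleans, undefined, null) do
// not, so value & tagTypeNumberRegister is nonzero exactly for numbers.
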
void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branchTest64(Zero, addressFor(activation));
    emitGetVirtualRegister(activation, regT0);
    callOperation(operationTearOffActivation, regT0);
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branchTest64(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset())));
    emitGetVirtualRegister(unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset(), regT0);
    emitGetVirtualRegister(activation, regT1);
    callOperation(operationTearOffArguments, regT0, regT1);
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueGPR);
    Jump notObject = emitJumpIfCellNotObject(returnValueGPR);

    // Return.
    emitFunctionEpilogue();
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueGPR);

    // Return.
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchStructure(NotEqual,
        Address(regT0, JSCell::structureIDOffset()),
        m_vm->stringStructure.get()));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

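// Worked example for op_not, under the same encoding: false (0x6) ^
// ValueFalse = 0, and true (0x7) ^ ValueFalse = 1. The branchTestPtr
// against ~1 sends every non-boolean input to the slow case; xoring
// ValueTrue (0x7) then maps 0 -> 0x7 (true) and 1 -> 0x6 (false), which
// is exactly logical negation.
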
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

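// Fast-path coverage for op_jfalse: the integer 0 and false take the
// branch, any other immediate integer and true fall through, and
// everything else (doubles, cells, undefined, null) defers to the slow
// case, which calls operationConvertJSValueToBoolean (see
// emitSlow_op_jfalse below).
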
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister().offset() || m_codeBlock->isStrictMode())
        isNotObject.append(emitJumpIfCellNotObject(regT0));

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    callOperation(operationGetPNames, regT0);
    emitStoreCell(dst, returnValueGPR);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store64(tagTypeNumberRegister, addressFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);
    callOperation(operationToObject, base, regT0);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    load64(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    emitLoadStructure(regT0, regT2, regT3);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    emitLoadStructure(regT2, regT2, regT1);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    callOperation(operationHasProperty, regT0, regT1);

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationPushWithScope, regT0);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    callOperation(operationPopScope);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

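// Design note: or64'ing the two operands and testing the result for a
// cell catches the case where *both* are cells - a cell has all TagMask
// bits clear, so the OR has them clear only if both operands do. That
// routes string === string to the slow path, where strings compare by
// contents rather than by pointer identity.
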
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    callOperation(operationPushNameScope, &m_codeBlock->identifier(currentInstruction[1].u.operand), regT0, currentInstruction[3].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // Gotta restore the tag registers. We could be throwing from FTL, which may
    // clobber them.
    move(TrustedImm64(TagTypeNumber), tagTypeNumberRegister);
    move(TrustedImm64(TagMask), tagMaskRegister);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    move(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))), regT0);
    callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

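// MasqueradesAsUndefined exists for objects like document.all, which
// compare equal to undefined and null; such objects only masquerade
// within their own global object, hence the globalObject comparison
// above.
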
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock->ownerExecutable());

    emitEnterOptimizationCheck();
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    callOperation(operationCreateActivation, 0);
    emitStoreCell(dst, returnValueGPR);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));

    callOperation(operationCreateArguments);
    emitStoreCell(dst, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(dst)), returnValueGPR);

    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    store64(TrustedImm64((int64_t)0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

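// op_to_this caches the expected final-object Structure in the
// instruction stream; any mismatch (non-cell, non-final-object, empty
// cache, or a different structure ID) falls through to slow_path_to_this,
// which re-resolves |this| and can refresh the cache.
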
void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int result = currentInstruction[1].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[2].u.jsCell;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);

    loadPtr(cachedFunction, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));

    emitPutVirtualRegister(result);
}

void JIT::emitSlow_op_get_callee(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_callee);
    slowPathCall.call();
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileWillCall, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileDidCall, regT0);
    profilerDone.link(this);
}

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
    slowPathCall.call();
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
    slowPathCall.call();
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(baseVal, regT1);
    callOperation(operationCheckHasInstance, dst, regT0, regT1);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

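// The call frame's ArgumentCount includes |this|, so the user-visible
// arguments.length is ArgumentCount - 1; emitFastArithReTagImmediate then
// re-boxes the raw int32 as a JSValue integer.
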
void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    callOperation(operationGetArgumentsLength, dst, base);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    callOperation(operationCreateArguments);
    emitStoreCell(arguments, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(arguments)), returnValueGPR);

    skipArgumentsCreation.link(this);
    emitGetVirtualRegister(arguments, regT0);
    emitGetVirtualRegister(property, regT1);
    callOperation(WithProfile, operationGetByValGeneric, dst, regT0, regT1);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_touch_entry(Instruction* currentInstruction)
{
    if (m_codeBlock->symbolTable()->m_functionEnteredOnce.hasBeenInvalidated())
        return;

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_touch_entry);
    slowPathCall.call();
}

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }

    // Emit the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog->timerDidFireAddress())));
}

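// The execution counter starts out negative and branchAdd32 traps on
// PositiveOrZero, so the slow case fires once the loop has executed
// enough times to be worth optimizing; emitSlow_op_loop_hint below then
// calls operationOptimize, which may hand back an optimized (DFG) entry
// point to jump to.
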
void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, regT0, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled()) {
        linkSlowCase(iter);
        callOperation(operationHandleWatchdogTimer);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTest64(NonZero, addressFor(dst));
#endif
    }

    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[2].u.operand);
    callOperation(operationNewFunction, dst, funcExec);

    if (currentInstruction[3].u.operand)
        lazyJump.link(this);
}

void JIT::emit_op_new_captured_func(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_new_captured_func);
    slowPathCall.call();
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    FunctionExecutable* funcExpr = m_codeBlock->functionExpr(currentInstruction[2].u.operand);
    callOperation(operationNewFunction, dst, funcExpr);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}

void JIT::emitSlow_op_captured_mov(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    VariableWatchpointSet* set = currentInstruction[3].u.watchpointSet;
    if (!set || set->state() == IsInvalidated)
        return;
#if USE(JSVALUE32_64)
    linkSlowCase(iter);
#endif
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_captured_mov);
    slowPathCall.call();
}

} // namespace JSC

#endif // ENABLE(JIT)