/*
 * Copyright (C) 2009, 2012-2015 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "BasicBlockLocation.h"
#include "CopiedSpaceInlines.h"
#include "Debugger.h"
#include "Exception.h"
#include "Heap.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSNameScope.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "RepatchBuffer.h"
#include "SlowPathCall.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"

namespace JSC {

#if USE(JSVALUE64)
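
// The fast paths in this file lean on the JSVALUE64 value encoding (see
// JSCJSValue.h for the authoritative definitions): boxed int32s carry
// TagTypeNumber in their top 16 bits, boxed doubles are offset so that at
// least one of those bits is set, cells have no tag bits set, and the
// special immediates are built from the low tag bits, e.g. ValueFalse ==
// 0x06, ValueTrue == 0x07, ValueNull == 0x02, ValueUndefined == 0x0a.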

JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands value and proto into registers (baseVal was already
    // consumed by op_check_has_instance). regT0 is left free to hold the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(regT2, regT2, regT3);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
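    // A cell marked MasqueradesAsUndefined only masquerades for code from its
    // own global object (this is the document.all quirk); a "foreign"
    // masquerader is not treated as undefined. Hence the comparison below
    // between the cell's Structure::globalObject() and this code block's.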
    emitLoadStructure(regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
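    // XOR with ValueFalse (0x06) maps false -> 0x00 and true -> 0x01, so the
    // value was a boolean exactly when every bit other than bit 0 is now
    // clear; that is what the test64 against ~1 checks. (Encoding per
    // JSCJSValue.h; e.g. 0x07 ^ 0x06 == 0x01.)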
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
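    // The test above works because any bit of TagTypeNumber set means
    // "number": boxed int32s have all of the top 16 bits set, and boxed
    // doubles (offset by 2^48) have at least one of them set, while cells
    // and the other immediates have none.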
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);
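
    // In the JSType enumeration the object types are ordered last, so a cell
    // is an object exactly when its type is >= ObjectType, hence the
    // AboveOrEqual compare below.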
    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in returnValueGPR.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
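    // Concretely, with ValueFalse == 0x06 and ValueTrue == 0x07: 0x06 ^ 0x06
    // == 0x00 and 0x07 ^ 0x06 == 0x01; anything else trips the ~1 slow-case
    // test. Then 0x00 ^ 0x07 == 0x07 (true) and 0x01 ^ 0x07 == 0x06 (false).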
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
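
    // Fast paths: the boxed int32 zero jumps to the target, any other boxed
    // int32 falls through, and the two boolean immediates are matched
    // exactly. Everything else (doubles, cells, undefined/null) takes the
    // slow case, which calls the generic JSValue-to-boolean conversion.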
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
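    // Clearing TagBitUndefined folds undefined onto null: with ValueUndefined
    // == 0x0a and ValueNull == 0x02, 0x0a & ~0x08 == 0x02, so a single
    // compare against jsNull() matches both immediates.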
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
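    // XORing two boxed int32s cancels the shared TagTypeNumber bits in the
    // top 16 bits, so the result must be re-tagged. op_bitor below gets this
    // for free, since OR leaves the shared tag bits set.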
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    callOperation(operationPushWithScope, dst, regT0);
}

void JIT::emit_op_pop_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;

    callOperation(operationPopScope, scope);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
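    // (Doubles are excluded because a raw 64-bit compare is wrong for them:
    // +0 equals -0, NaN does not equal itself, and the same numeric value
    // can be boxed as either an int32 or a double.)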
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    if (currentInstruction[4].u.operand == JSNameScope::CatchScope) {
        callOperation(operationPushCatchScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT0);
        return;
    }

    RELEASE_ASSERT(currentInstruction[4].u.operand == JSNameScope::FunctionNameScope);
    callOperation(operationPushFunctionNameScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT0);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // Gotta restore the tag registers. We could be throwing from FTL, which may
    // clobber them.
    move(TrustedImm64(TagTypeNumber), tagTypeNumberRegister);
    move(TrustedImm64(TagMask), tagMaskRegister);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);
    load64(Address(regT3, VM::vmEntryFrameForThrowOffset()), regT0);
    store64(regT0, Address(regT3, VM::topVMEntryFrameOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

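    // The VM hands op_catch the pending Exception* cell: take it, clear the
    // VM's pending-exception slot, store the Exception itself to the first
    // operand, then unpack the thrown JSValue for the second operand.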
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    move(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))), regT0);
    callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock->ownerExecutable());

    emitEnterOptimizationCheck();
}

void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;

    emitGetVirtualRegister(scope, regT0);
    callOperation(operationCreateActivation, regT0);
    emitStoreCell(dst, returnValueGPR);
    emitStoreCell(scope, returnValueGPR);
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

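    // The cached cell is either the one JSFunction this site has constructed
    // with, or the seenMultipleCalleeObjects() sentinel. A real cached
    // function must match the current callee exactly; the sentinel lets any
    // callee proceed with the profiled structure.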
    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have rare data
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // cached function didn't match
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
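    // A let/const binding still in its temporal dead zone holds the empty
    // JSValue, which encodes as all-zero bits - hence the plain branchTest64.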
    addSlowCase(branchTest64(Zero, regT0));
}

void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileWillCall, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileDidCall, regT0);
    profilerDone.link(this);
}

// Slow cases

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), currentInstruction[2].u.operand); // Inverted: jfalse jumps when the value converts to false.
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
    slowPathCall.call();
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
    slowPathCall.call();
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(baseVal, regT1);
    callOperation(operationCheckHasInstance, dst, regT0, regT1);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
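        // The execution counter starts out negative and counts up; the
        // branchAdd32 with PositiveOrZero fires once the increment crosses
        // zero, at which point the slow path considers tiering up.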
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }

    // Emit the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog->timerDidFireAddress())));
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, regT0, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled()) {
        linkSlowCase(iter);
        callOperation(operationHandleWatchdogTimer);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);
    callOperation(operationNewFunction, dst, regT0, funcExec);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    Jump notUndefinedScope;
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
    emitStore(dst, jsUndefined());
#endif

    Jump done = jump();
    notUndefinedScope.link(this);

    FunctionExecutable* funcExpr = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    callOperation(operationNewFunction, dst, regT0, funcExpr);
    done.link(this);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}

#if USE(JSVALUE64)
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

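    // If the base still has the structure the enumerator was built from, the
    // structure-enumerated properties are guaranteed to still exist, so the
    // answer is simply true; any structure mismatch goes to the slow path.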
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against the m_vectorLength - which will always fail if
    // the number was negative, since m_vectorLength is always less than intmax (the total allocation
    // size is always less than 4Gb). As such, zero-extending will have been correct (and extending the
    // value to 64 bits is necessary since it's used in the address calculation). We zero-extend rather
    // than sign-extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, profile);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell.
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure.
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset.
    emitGetVirtualRegister(index, regT1);
    // If the index is less than the enumerator's cached inline capacity, this is an inline access.
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line.
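    // Out-of-line properties live in the butterfly at negative indices, so
    // compute inlineCapacity - index (a value <= 0 here) and scale it against
    // the (negative) offset of the first out-of-line property.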
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
        jumpToEnd.append(emitJumpIfImmediateInteger(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfImmediateNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
    slowPathCall.call();
}

void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
    slowPathCall.call();
}

void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
    slowPathCall.call();
}

void JIT::emit_op_get_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_to_index_string(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
    slowPathCall.call();
}

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
    if (!basicBlockLocation->hasExecuted())
        basicBlockLocation->emitExecuteCode(*this, regT1);
}

void JIT::emit_op_create_direct_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_direct_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_scoped_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_scoped_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_out_of_band_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_out_of_band_arguments);
    slowPathCall.call();
}

} // namespace JSC

#endif // ENABLE(JIT)