/*
 * Copyright (C) 2009, 2012, 2013, 2014 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CCallHelpers.h"
#include "Debugger.h"
#include "Exception.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSEnvironmentRecord.h"
#include "JSFunction.h"
#include "JSNameScope.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "RepatchBuffer.h"
#include "SlowPathCall.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"

namespace JSC {

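// Emits the shared trampoline used to call a host (native) function: set up the
// call frame, pass the ExecState* in the platform's argument register, make the
// call, then check the VM's exception slot before returning. If an exception is
// pending, we re-enter the VM via operationVMHandleException and jump to the handler.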
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction func)
{
    Call nativeCall;

    emitFunctionPrologue();
    emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock);
    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(8), stackPointerRegister); // Align stack for call.
    storePtr(X86Registers::ecx, Address(stackPointerRegister));

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(8), stackPointerRegister);

#elif CPU(ARM) || CPU(SH4) || CPU(MIPS)
#if CPU(MIPS)
    // Allocate (unused) stack space for the 4 arguments: 16 bytes, 8-byte aligned.
    subPtr(TrustedImm32(16), stackPointerRegister);
#endif

    // Calling convention is f(argumentGPR0, argumentGPR1, ...).
    // Host function signature is f(ExecState*).
    move(callFrameRegister, argumentGPR0);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, argumentGPR1);
    loadPtr(Address(argumentGPR1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

#if CPU(MIPS)
    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);
#endif

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    abortWithReason(JITNotSupported);
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(vm->addressOfException()), TrustedImm32(0));

    emitFunctionEpilogue();
    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    addPtr(TrustedImm32(-4), stackPointerRegister);
    loadPtr(Address(callFrameRegister), X86Registers::ecx);
    push(X86Registers::ecx);
#else
    loadPtr(Address(callFrameRegister), argumentGPR0);
#endif
    move(TrustedImmPtr(FunctionPtr(operationVMHandleException).value()), regT3);
    call(regT3);

#if CPU(X86)
    addPtr(TrustedImm32(8), stackPointerRegister);
#endif

    jumpToExceptionHandler();

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the VM object.
    LinkBuffer patchBuffer(*m_vm, *this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return FINALIZE_CODE(patchBuffer, ("JIT CTI native call"));
}

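// op_mov simply copies src to dst. Constants are stored directly; everything
// else round-trips through the tag/payload register pair.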
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueGPR != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

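// op_check_has_instance verifies that baseVal is a cell with the default
// hasInstance behavior; anything else takes the slow path.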
void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

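// op_instanceof walks value's prototype chain inline, comparing each prototype
// against proto. Cell-only and object-only checks guard the fast path; anything
// else is handled by the slow path.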
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands into registers.
    // value goes in regT2 and proto in regT1, leaving regT0 free to hold the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoad(baseVal, regT3, regT2);
    callOperation(operationCheckHasInstance, dst, regT1, regT0, regT3, regT2);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoad(proto, regT3, regT2);
    callOperation(operationInstanceOf, dst, regT1, regT0, regT3, regT2);
}

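// op_is_undefined: a non-cell is undefined iff its tag is UndefinedTag. A cell
// only counts as undefined if it masquerades as undefined, and then only when
// observed from its own global object.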
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
    emitStoreBool(dst, regT0);
}

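// op_is_number exploits the tag layout: Int32Tag is 0xffffffff and double
// values use tags below LowestTag. Adding 1 wraps Int32Tag to 0, so a single
// unsigned "below LowestTag + 1" compare accepts exactly int32s and doubles.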
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

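// op_jfalse / op_jtrue fast paths assume a boolean or int32 condition: those
// two tags are the largest tag values, so one unsigned compare catches both.
// Doubles and other values fall through to the slow path below.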
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}

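// op_jeq_null / op_jneq_null: a cell compares equal to null only when it
// masquerades as undefined from the current global object. For immediates we
// rely on UndefinedTag and NullTag differing only in the low bit, so or'ing in
// 1 folds both cases into a single compare against NullTag.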
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    emitLoad(op1, regT1, regT0);
    emitLoad(op2, regT3, regT2);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    emitStoreBool(dst, returnValueGPR);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), returnValueGPR);
    emitStoreBool(dst, returnValueGPR);
}

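// Shared fast path for op_stricteq / op_nstricteq: once the tags match, are not
// doubles, and the operands are not both non-object cells (e.g. two strings),
// strict equality reduces to a payload compare.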
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both operands are cells that are not objects (e.g. both strings or symbols).
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstIsObject = emitJumpIfCellObject(regT0);
    addSlowCase(emitJumpIfCellNotObject(regT2));
    notCell.link(this);
    firstIsObject.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperationNoExceptionCheck(operationThrow, regT1, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
    callOperation(operationPushWithScope, dst, regT1, regT0);
}

void JIT::emit_op_pop_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;
    callOperation(operationPopScope, scope);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    if (src != dst)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
    if (currentInstruction[4].u.operand == JSNameScope::CatchScope) {
        callOperation(operationPushCatchScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT1, regT0);
        return;
    }

    RELEASE_ASSERT(currentInstruction[4].u.operand == JSNameScope::FunctionNameScope);
    callOperation(operationPushFunctionNameScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT1, regT0);
}

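// op_catch runs at the start of a handler: restore the call frame and
// VMEntryFrame saved by the unwinder, reset the stack pointer, then store the
// Exception* and the thrown value into their designated locals.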
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    move(TrustedImmPtr(m_vm), regT3);
    // operationThrow returns the callFrame for the handler.
    load32(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);
    load32(Address(regT3, VM::vmEntryFrameForThrowOffset()), regT0);
    store32(regT0, Address(regT3, VM::topVMEntryFrameOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    // Now store the exception returned by operationThrow.
    load32(Address(regT3, VM::exceptionOffset()), regT2);
    move(TrustedImm32(JSValue::CellTag), regT1);

    store32(TrustedImm32(0), Address(regT3, VM::exceptionOffset()));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT2);

    load32(Address(regT2, Exception::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT2, Exception::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);

    unsigned thrownValue = currentInstruction[2].u.operand;
    emitStore(thrownValue, regT1, regT0);
}

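// The op_switch_* emitters don't dispatch inline; they record the jump table
// for later linking and call an operation that returns the machine-code target
// address for the scrutinee, which we then jump to.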
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    emitLoad(m_codeBlock->getConstant(currentInstruction[1].u.operand), regT1, regT0);
    callOperation(operationThrowStaticError, regT1, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_enter(Instruction* currentInstruction)
{
    emitEnterOptimizationCheck();

    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(virtualRegisterForLocal(i).offset(), jsUndefined());

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_enter);
    slowPathCall.call();
}

void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    int lexicalEnvironment = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;

    emitLoadPayload(scope, regT0);
    callOperation(operationCreateActivation, regT0);
    emitStoreCell(lexicalEnvironment, returnValueGPR);
    emitStoreCell(scope, returnValueGPR);
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

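// op_create_this fast path: allocate the new object from the callee's cached
// allocation profile, guarded by checks that the callee has rare data, an
// allocator, and still matches the cached callee (unless multiple callees have
// already been seen at this site).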
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitLoadPayload(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have rare data
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed
    linkSlowCase(iter); // cached function didn't match

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    int thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT0);
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));
}

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitLoadTag(currentInstruction[1].u.operand, regT0);
    addSlowCase(branch32(Equal, regT0, TrustedImm32(JSValue::EmptyValueTag)));
}

void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileWillCall, regT1, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileDidCall, regT1, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(enumerator, regT1);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm32(1), regT0);
    emitStoreBool(dst, regT0);
}

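// Generates a per-site stub for op_has_indexed_property once the array shape is
// known, then repatches the original bad-type jump to enter the stub and the
// slow-path call to go to the generic operation.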
void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm32(1), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against the m_vectorLength - which will always fail if
    // the number was negative, since m_vectorLength is always less than INT_MAX (the total allocation
    // size is always less than 4GB). As such, zero-extending will have been correct (and extending the
    // value is necessary since it's used in the address calculation). We zero-extend rather than
    // sign-extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);
    move(TrustedImm32(1), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitStoreBool(dst, regT0);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitLoad(base, regT1, regT0);
    emitLoad(property, regT3, regT2);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT1, regT0, regT3, regT2, profile);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

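// op_get_direct_pname fast path: if the base's structure still matches the
// enumerator's cached structure, load the property directly - from inline
// storage when the index is below the inline capacity, otherwise from the
// butterfly's out-of-line storage.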
void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    // Check the structure
    emitLoadPayload(enumerator, regT1);
    load32(Address(regT0, JSCell::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitLoadPayload(index, regT2);
    // If the index is less than the enumerator's cached inline capacity, this is an inline access.
    Jump outOfLineAccess = branch32(AboveOrEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);

    Jump done = jump();

    // Otherwise it's out of line
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT2);
    neg32(regT2);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitLoadPayload(index, regT0);
    emitLoadPayload(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm32(JSValue::NullTag), regT2);
    move(TrustedImm32(0), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
    move(TrustedImm32(JSValue::CellTag), regT2);

    done.link(this);
    emitStore(dst, regT2, regT0);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitLoadPayload(index, regT0);
    emitLoadPayload(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm32(JSValue::NullTag), regT2);
    move(TrustedImm32(0), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
    move(TrustedImm32(JSValue::CellTag), regT2);

    done.link(this);
    emitStore(dst, regT2, regT0);
}

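// op_profile_type appends the value (payload, tag, structureID if it is a cell,
// and the TypeLocation) to the type profiler log inline, calling out to flush
// the log only when the end is reached. The up-front tag checks skip the write
// entirely when the value matches the type last seen at this location.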
void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    // Load payload in T0. Load tag in T3.
    emitLoadPayload(valueToProfile, regT0);
    emitLoadTag(valueToProfile, regT3);

    JumpList jumpToEnd;

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 32-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::UndefinedTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::NullTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::BooleanTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::Int32Tag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber) {
        jumpToEnd.append(branch32(Below, regT3, TrustedImm32(JSValue::LowestTag)));
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::Int32Tag)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag));
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);

    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(regT3, Address(regT1, TypeProfilerLog::LogEntry::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    // Store the structureID of the cell if the argument is a cell; otherwise, store 0 on the log entry.
    Jump notCell = branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag));
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipNotCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipNotCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store32(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    jumpToEnd.append(branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr())));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);

    jumpToEnd.link(this);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)