1 /*
2 * Copyright (C) 2009, 2012-2015 Apple Inc. All rights reserved.
3 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 *
14 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
15 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
18 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
19 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
22 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
24 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25 */
26
27 #include "config.h"
28 #if ENABLE(JIT)
29 #include "JIT.h"
30
31 #include "BasicBlockLocation.h"
32 #include "CopiedSpaceInlines.h"
33 #include "Debugger.h"
34 #include "Exception.h"
35 #include "Heap.h"
36 #include "JITInlines.h"
37 #include "JSArray.h"
38 #include "JSCell.h"
39 #include "JSFunction.h"
40 #include "JSNameScope.h"
41 #include "JSPropertyNameEnumerator.h"
42 #include "LinkBuffer.h"
43 #include "MaxFrameExtentForSlowPathCall.h"
44 #include "RepatchBuffer.h"
45 #include "SlowPathCall.h"
46 #include "TypeLocation.h"
47 #include "TypeProfilerLog.h"
48 #include "VirtualRegister.h"
49
50 namespace JSC {
51
52 #if USE(JSVALUE64)
53
54 JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
55 {
56 return vm->getCTIStub(nativeCallGenerator);
57 }
58
59 void JIT::emit_op_mov(Instruction* currentInstruction)
60 {
61 int dst = currentInstruction[1].u.operand;
62 int src = currentInstruction[2].u.operand;
63
64 emitGetVirtualRegister(src, regT0);
65 emitPutVirtualRegister(dst);
66 }
67
68
69 void JIT::emit_op_end(Instruction* currentInstruction)
70 {
71 RELEASE_ASSERT(returnValueGPR != callFrameRegister);
72 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
73 emitFunctionEpilogue();
74 ret();
75 }
76
77 void JIT::emit_op_jmp(Instruction* currentInstruction)
78 {
79 unsigned target = currentInstruction[1].u.operand;
80 addJump(jump(), target);
81 }
82
83 void JIT::emit_op_new_object(Instruction* currentInstruction)
84 {
85 Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
86 size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
87 MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);
88
89 RegisterID resultReg = regT0;
90 RegisterID allocatorReg = regT1;
91 RegisterID scratchReg = regT2;
92
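// A sketch of what happens next: emitAllocateJSObject bump-allocates out of this allocator's
// free list; if the list is empty it branches to the slow case, which calls operationNewObject.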
93 move(TrustedImmPtr(allocator), allocatorReg);
94 emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
95 emitPutVirtualRegister(currentInstruction[1].u.operand);
96 }
97
98 void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
99 {
100 linkSlowCase(iter);
101 int dst = currentInstruction[1].u.operand;
102 Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
103 callOperation(operationNewObject, structure);
104 emitStoreCell(dst, returnValueGPR);
105 }
106
107 void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
108 {
109 int baseVal = currentInstruction[3].u.operand;
110
111 emitGetVirtualRegister(baseVal, regT0);
112
113 // Check that baseVal is a cell.
114 emitJumpSlowCaseIfNotJSCell(regT0, baseVal);
115
116 // Check that baseVal has the ImplementsDefaultHasInstance flag set in its type info.
117 addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
118 }
119
120 void JIT::emit_op_instanceof(Instruction* currentInstruction)
121 {
122 int dst = currentInstruction[1].u.operand;
123 int value = currentInstruction[2].u.operand;
124 int proto = currentInstruction[3].u.operand;
125
126 // Load the operands value and proto into regT2 and regT1 respectively.
127 // baseVal has already been consumed by op_check_has_instance, so regT0 is kept free for the result.
128 emitGetVirtualRegister(value, regT2);
129 emitGetVirtualRegister(proto, regT1);
130
131 // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
132 emitJumpSlowCaseIfNotJSCell(regT2, value);
133 emitJumpSlowCaseIfNotJSCell(regT1, proto);
134
135 // Check that prototype is an object
136 addSlowCase(emitJumpIfCellNotObject(regT1));
137
138 // Optimistically load the result true, and start looping.
139 // Initially, regT1 still contains proto and regT2 still contains value.
140 // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
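// Roughly, the fast path below is a sketch of (not the slow-path semantics):
//     result = true;
//     do {
//         obj = obj->structure()->prototype();
//         if (obj == proto) goto done;
//     } while (obj is a cell);
//     result = false;
// done: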
141 move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
142 Label loop(this);
143
144 // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
145 // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
146 emitLoadStructure(regT2, regT2, regT3);
147 load64(Address(regT2, Structure::prototypeOffset()), regT2);
148 Jump isInstance = branchPtr(Equal, regT2, regT1);
149 emitJumpIfJSCell(regT2).linkTo(loop, this);
150
151 // We get here by dropping out of the loop - the prototype chain ended in null without hitting proto. Result is false.
152 move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);
153
154 // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
155 isInstance.link(this);
156 emitPutVirtualRegister(dst);
157 }
158
159 void JIT::emit_op_is_undefined(Instruction* currentInstruction)
160 {
161 int dst = currentInstruction[1].u.operand;
162 int value = currentInstruction[2].u.operand;
163
164 emitGetVirtualRegister(value, regT0);
165 Jump isCell = emitJumpIfJSCell(regT0);
166
167 compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
168 Jump done = jump();
169
170 isCell.link(this);
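// Cells that masquerade as undefined (e.g. document.all) only compare equal to undefined from
// within their own global object, hence the globalObject comparison below.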
171 Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
172 move(TrustedImm32(0), regT0);
173 Jump notMasqueradesAsUndefined = jump();
174
175 isMasqueradesAsUndefined.link(this);
176 emitLoadStructure(regT0, regT1, regT2);
177 move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
178 loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
179 comparePtr(Equal, regT0, regT1, regT0);
180
181 notMasqueradesAsUndefined.link(this);
182 done.link(this);
183 emitTagAsBoolImmediate(regT0);
184 emitPutVirtualRegister(dst);
185 }
186
187 void JIT::emit_op_is_boolean(Instruction* currentInstruction)
188 {
189 int dst = currentInstruction[1].u.operand;
190 int value = currentInstruction[2].u.operand;
191
192 emitGetVirtualRegister(value, regT0);
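// Booleans are encoded as ValueFalse / ValueTrue (0x06 / 0x07 in the usual JSValue64 encoding),
// so xor-ing with ValueFalse leaves 0 or 1 for a boolean; any other bit pattern fails the ~1 test.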
193 xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
194 test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
195 emitTagAsBoolImmediate(regT0);
196 emitPutVirtualRegister(dst);
197 }
198
199 void JIT::emit_op_is_number(Instruction* currentInstruction)
200 {
201 int dst = currentInstruction[1].u.operand;
202 int value = currentInstruction[2].u.operand;
203
204 emitGetVirtualRegister(value, regT0);
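// In the JSValue64 encoding every number (int32 or double) has at least one TagTypeNumber bit set,
// while cells and the other immediates have none, so a single masked test suffices.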
205 test64(NonZero, regT0, tagTypeNumberRegister, regT0);
206 emitTagAsBoolImmediate(regT0);
207 emitPutVirtualRegister(dst);
208 }
209
210 void JIT::emit_op_is_string(Instruction* currentInstruction)
211 {
212 int dst = currentInstruction[1].u.operand;
213 int value = currentInstruction[2].u.operand;
214
215 emitGetVirtualRegister(value, regT0);
216 Jump isNotCell = emitJumpIfNotJSCell(regT0);
217
218 compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
219 emitTagAsBoolImmediate(regT0);
220 Jump done = jump();
221
222 isNotCell.link(this);
223 move(TrustedImm32(ValueFalse), regT0);
224
225 done.link(this);
226 emitPutVirtualRegister(dst);
227 }
228
229 void JIT::emit_op_is_object(Instruction* currentInstruction)
230 {
231 int dst = currentInstruction[1].u.operand;
232 int value = currentInstruction[2].u.operand;
233
234 emitGetVirtualRegister(value, regT0);
235 Jump isNotCell = emitJumpIfNotJSCell(regT0);
236
237 compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
238 emitTagAsBoolImmediate(regT0);
239 Jump done = jump();
240
241 isNotCell.link(this);
242 move(TrustedImm32(ValueFalse), regT0);
243
244 done.link(this);
245 emitPutVirtualRegister(dst);
246 }
247
248 void JIT::emit_op_ret(Instruction* currentInstruction)
249 {
250 ASSERT(callFrameRegister != regT1);
251 ASSERT(regT1 != returnValueGPR);
252 ASSERT(returnValueGPR != callFrameRegister);
253
254 // Return the result in the return value register.
255 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
256
257 checkStackPointerAlignment();
258 emitFunctionEpilogue();
259 ret();
260 }
261
262 void JIT::emit_op_to_primitive(Instruction* currentInstruction)
263 {
264 int dst = currentInstruction[1].u.operand;
265 int src = currentInstruction[2].u.operand;
266
267 emitGetVirtualRegister(src, regT0);
268
269 Jump isImm = emitJumpIfNotJSCell(regT0);
270 addSlowCase(emitJumpIfCellObject(regT0));
271 isImm.link(this);
272
273 if (dst != src)
274 emitPutVirtualRegister(dst);
275
276 }
277
278 void JIT::emit_op_strcat(Instruction* currentInstruction)
279 {
280 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
281 slowPathCall.call();
282 }
283
284 void JIT::emit_op_not(Instruction* currentInstruction)
285 {
286 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
287
288 // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
289 // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
290 // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
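// For example, with the usual JSValue64 encoding (ValueFalse == 0x06, ValueTrue == 0x07):
//     false: 0x06 ^ ValueFalse == 0x00, passes the ~1 test, then ^ ValueTrue == 0x07 (true)
//     true:  0x07 ^ ValueFalse == 0x01, passes the ~1 test, then ^ ValueTrue == 0x06 (false)
// Anything else leaves bits outside the low bit set after the first xor and takes the slow case.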
291 xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
292 addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
293 xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);
294
295 emitPutVirtualRegister(currentInstruction[1].u.operand);
296 }
297
298 void JIT::emit_op_jfalse(Instruction* currentInstruction)
299 {
300 unsigned target = currentInstruction[2].u.operand;
301 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
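// Fast path: the integer 0 is falsy, any other immediate integer is truthy, and the two boolean
// encodings are handled explicitly; everything else (doubles, cells, undefined/null) falls to the
// slow path's toBoolean conversion.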
302
303 addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
304 Jump isNonZero = emitJumpIfImmediateInteger(regT0);
305
306 addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
307 addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));
308
309 isNonZero.link(this);
310 }
311
312 void JIT::emit_op_jeq_null(Instruction* currentInstruction)
313 {
314 int src = currentInstruction[1].u.operand;
315 unsigned target = currentInstruction[2].u.operand;
316
317 emitGetVirtualRegister(src, regT0);
318 Jump isImmediate = emitJumpIfNotJSCell(regT0);
319
320 // First, handle JSCell cases - check the MasqueradesAsUndefined bit in the cell's type info flags.
321 Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
322 emitLoadStructure(regT0, regT2, regT1);
323 move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
324 addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
325 Jump masqueradesGlobalObjectIsForeign = jump();
326
327 // Now handle the immediate cases - undefined & null
328 isImmediate.link(this);
329 and64(TrustedImm32(~TagBitUndefined), regT0);
330 addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);
331
332 isNotMasqueradesAsUndefined.link(this);
333 masqueradesGlobalObjectIsForeign.link(this);
334 }
335 void JIT::emit_op_jneq_null(Instruction* currentInstruction)
336 {
337 int src = currentInstruction[1].u.operand;
338 unsigned target = currentInstruction[2].u.operand;
339
340 emitGetVirtualRegister(src, regT0);
341 Jump isImmediate = emitJumpIfNotJSCell(regT0);
342
343 // First, handle JSCell cases - check the MasqueradesAsUndefined bit in the cell's type info flags.
344 addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
345 emitLoadStructure(regT0, regT2, regT1);
346 move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
347 addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
348 Jump wasNotImmediate = jump();
349
350 // Now handle the immediate cases - undefined & null
351 isImmediate.link(this);
352 and64(TrustedImm32(~TagBitUndefined), regT0);
353 addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);
354
355 wasNotImmediate.link(this);
356 }
357
358 void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
359 {
360 int src = currentInstruction[1].u.operand;
361 Special::Pointer ptr = currentInstruction[2].u.specialPointer;
362 unsigned target = currentInstruction[3].u.operand;
363
364 emitGetVirtualRegister(src, regT0);
365 addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
366 }
367
368 void JIT::emit_op_eq(Instruction* currentInstruction)
369 {
370 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
371 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
372 compare32(Equal, regT1, regT0, regT0);
373 emitTagAsBoolImmediate(regT0);
374 emitPutVirtualRegister(currentInstruction[1].u.operand);
375 }
376
377 void JIT::emit_op_jtrue(Instruction* currentInstruction)
378 {
379 unsigned target = currentInstruction[2].u.operand;
380 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
381
382 Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
383 addJump(emitJumpIfImmediateInteger(regT0), target);
384
385 addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
386 addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));
387
388 isZero.link(this);
389 }
390
391 void JIT::emit_op_neq(Instruction* currentInstruction)
392 {
393 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
394 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
395 compare32(NotEqual, regT1, regT0, regT0);
396 emitTagAsBoolImmediate(regT0);
397
398 emitPutVirtualRegister(currentInstruction[1].u.operand);
399
400 }
401
402 void JIT::emit_op_bitxor(Instruction* currentInstruction)
403 {
404 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
405 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
406 xor64(regT1, regT0);
407 emitFastArithReTagImmediate(regT0, regT0);
408 emitPutVirtualRegister(currentInstruction[1].u.operand);
409 }
410
411 void JIT::emit_op_bitor(Instruction* currentInstruction)
412 {
413 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
414 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
415 or64(regT1, regT0);
416 emitPutVirtualRegister(currentInstruction[1].u.operand);
417 }
418
419 void JIT::emit_op_throw(Instruction* currentInstruction)
420 {
421 ASSERT(regT0 == returnValueGPR);
422 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
423 callOperationNoExceptionCheck(operationThrow, regT0);
424 jumpToExceptionHandler();
425 }
426
427 void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
428 {
429 int dst = currentInstruction[1].u.operand;
430 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
431 callOperation(operationPushWithScope, dst, regT0);
432 }
433
434 void JIT::emit_op_pop_scope(Instruction* currentInstruction)
435 {
436 int scope = currentInstruction[1].u.operand;
437
438 callOperation(operationPopScope, scope);
439 }
440
441 void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
442 {
443 int dst = currentInstruction[1].u.operand;
444 int src1 = currentInstruction[2].u.operand;
445 int src2 = currentInstruction[3].u.operand;
446
447 emitGetVirtualRegisters(src1, regT0, src2, regT1);
448
449 // Jump slow if both are cells (to cover strings).
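// Or-ing the two values works because cells are the only JSValues with no tag bits set: the or
// still looks like a cell only if both operands were cells.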
450 move(regT0, regT2);
451 or64(regT1, regT2);
452 addSlowCase(emitJumpIfJSCell(regT2));
453
454 // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
455 // if it's a double.
456 Jump leftOK = emitJumpIfImmediateInteger(regT0);
457 addSlowCase(emitJumpIfImmediateNumber(regT0));
458 leftOK.link(this);
459 Jump rightOK = emitJumpIfImmediateInteger(regT1);
460 addSlowCase(emitJumpIfImmediateNumber(regT1));
461 rightOK.link(this);
462
463 if (type == OpStrictEq)
464 compare64(Equal, regT1, regT0, regT0);
465 else
466 compare64(NotEqual, regT1, regT0, regT0);
467 emitTagAsBoolImmediate(regT0);
468
469 emitPutVirtualRegister(dst);
470 }
471
472 void JIT::emit_op_stricteq(Instruction* currentInstruction)
473 {
474 compileOpStrictEq(currentInstruction, OpStrictEq);
475 }
476
477 void JIT::emit_op_nstricteq(Instruction* currentInstruction)
478 {
479 compileOpStrictEq(currentInstruction, OpNStrictEq);
480 }
481
482 void JIT::emit_op_to_number(Instruction* currentInstruction)
483 {
484 int srcVReg = currentInstruction[2].u.operand;
485 emitGetVirtualRegister(srcVReg, regT0);
486
487 addSlowCase(emitJumpIfNotImmediateNumber(regT0));
488
489 emitPutVirtualRegister(currentInstruction[1].u.operand);
490 }
491
492 void JIT::emit_op_to_string(Instruction* currentInstruction)
493 {
494 int srcVReg = currentInstruction[2].u.operand;
495 emitGetVirtualRegister(srcVReg, regT0);
496
497 addSlowCase(emitJumpIfNotJSCell(regT0));
498 addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
499
500 emitPutVirtualRegister(currentInstruction[1].u.operand);
501 }
502
503 void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
504 {
505 int dst = currentInstruction[1].u.operand;
506 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
507 if (currentInstruction[4].u.operand == JSNameScope::CatchScope) {
508 callOperation(operationPushCatchScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT0);
509 return;
510 }
511
512 RELEASE_ASSERT(currentInstruction[4].u.operand == JSNameScope::FunctionNameScope);
513 callOperation(operationPushFunctionNameScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT0);
514 }
515
516 void JIT::emit_op_catch(Instruction* currentInstruction)
517 {
518 // Gotta restore the tag registers. We could be throwing from FTL, which may
519 // clobber them.
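// (The inline type checks emitted by the baseline JIT assume these constants are live in
// tagTypeNumberRegister / tagMaskRegister, so they must be restored before any baseline code runs.)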
520 move(TrustedImm64(TagTypeNumber), tagTypeNumberRegister);
521 move(TrustedImm64(TagMask), tagMaskRegister);
522
523 move(TrustedImmPtr(m_vm), regT3);
524 load64(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);
525 load64(Address(regT3, VM::vmEntryFrameForThrowOffset()), regT0);
526 store64(regT0, Address(regT3, VM::topVMEntryFrameOffset()));
527
528 addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);
529
530 load64(Address(regT3, VM::exceptionOffset()), regT0);
531 store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
532 emitPutVirtualRegister(currentInstruction[1].u.operand);
533
534 load64(Address(regT0, Exception::valueOffset()), regT0);
535 emitPutVirtualRegister(currentInstruction[2].u.operand);
536 }
537
538 void JIT::emit_op_switch_imm(Instruction* currentInstruction)
539 {
540 size_t tableIndex = currentInstruction[1].u.operand;
541 unsigned defaultOffset = currentInstruction[2].u.operand;
542 unsigned scrutinee = currentInstruction[3].u.operand;
543
544 // create jump table for switch destinations, track this switch statement.
545 SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
546 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
547 jumpTable->ensureCTITable();
548
549 emitGetVirtualRegister(scrutinee, regT0);
550 callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
551 jump(returnValueGPR);
552 }
553
554 void JIT::emit_op_switch_char(Instruction* currentInstruction)
555 {
556 size_t tableIndex = currentInstruction[1].u.operand;
557 unsigned defaultOffset = currentInstruction[2].u.operand;
558 unsigned scrutinee = currentInstruction[3].u.operand;
559
560 // create jump table for switch destinations, track this switch statement.
561 SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
562 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
563 jumpTable->ensureCTITable();
564
565 emitGetVirtualRegister(scrutinee, regT0);
566 callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
567 jump(returnValueGPR);
568 }
569
570 void JIT::emit_op_switch_string(Instruction* currentInstruction)
571 {
572 size_t tableIndex = currentInstruction[1].u.operand;
573 unsigned defaultOffset = currentInstruction[2].u.operand;
574 unsigned scrutinee = currentInstruction[3].u.operand;
575
576 // create jump table for switch destinations, track this switch statement.
577 StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
578 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));
579
580 emitGetVirtualRegister(scrutinee, regT0);
581 callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
582 jump(returnValueGPR);
583 }
584
585 void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
586 {
587 move(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))), regT0);
588 callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);
589 }
590
591 void JIT::emit_op_debug(Instruction* currentInstruction)
592 {
593 load32(codeBlock()->debuggerRequestsAddress(), regT0);
594 Jump noDebuggerRequests = branchTest32(Zero, regT0);
595 callOperation(operationDebug, currentInstruction[1].u.operand);
596 noDebuggerRequests.link(this);
597 }
598
599 void JIT::emit_op_eq_null(Instruction* currentInstruction)
600 {
601 int dst = currentInstruction[1].u.operand;
602 int src1 = currentInstruction[2].u.operand;
603
604 emitGetVirtualRegister(src1, regT0);
605 Jump isImmediate = emitJumpIfNotJSCell(regT0);
606
607 Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
608 move(TrustedImm32(0), regT0);
609 Jump wasNotMasqueradesAsUndefined = jump();
610
611 isMasqueradesAsUndefined.link(this);
612 emitLoadStructure(regT0, regT2, regT1);
613 move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
614 loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
615 comparePtr(Equal, regT0, regT2, regT0);
616 Jump wasNotImmediate = jump();
617
618 isImmediate.link(this);
619
620 and64(TrustedImm32(~TagBitUndefined), regT0);
621 compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);
622
623 wasNotImmediate.link(this);
624 wasNotMasqueradesAsUndefined.link(this);
625
626 emitTagAsBoolImmediate(regT0);
627 emitPutVirtualRegister(dst);
628
629 }
630
631 void JIT::emit_op_neq_null(Instruction* currentInstruction)
632 {
633 int dst = currentInstruction[1].u.operand;
634 int src1 = currentInstruction[2].u.operand;
635
636 emitGetVirtualRegister(src1, regT0);
637 Jump isImmediate = emitJumpIfNotJSCell(regT0);
638
639 Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
640 move(TrustedImm32(1), regT0);
641 Jump wasNotMasqueradesAsUndefined = jump();
642
643 isMasqueradesAsUndefined.link(this);
644 emitLoadStructure(regT0, regT2, regT1);
645 move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
646 loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
647 comparePtr(NotEqual, regT0, regT2, regT0);
648 Jump wasNotImmediate = jump();
649
650 isImmediate.link(this);
651
652 and64(TrustedImm32(~TagBitUndefined), regT0);
653 compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);
654
655 wasNotImmediate.link(this);
656 wasNotMasqueradesAsUndefined.link(this);
657
658 emitTagAsBoolImmediate(regT0);
659 emitPutVirtualRegister(dst);
660 }
661
662 void JIT::emit_op_enter(Instruction*)
663 {
664 // Even though CTI doesn't strictly need them initialized, we zero the local variable
665 // registers to zap stale pointers, to avoid unnecessarily prolonging
666 // object lifetime and increasing GC pressure.
667 size_t count = m_codeBlock->m_numVars;
668 for (size_t j = 0; j < count; ++j)
669 emitInitRegister(virtualRegisterForLocal(j).offset());
670
671 emitWriteBarrier(m_codeBlock->ownerExecutable());
672
673 emitEnterOptimizationCheck();
674 }
675
676 void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
677 {
678 int dst = currentInstruction[1].u.operand;
679 int scope = currentInstruction[2].u.operand;
680
681 emitGetVirtualRegister(scope, regT0);
682 callOperation(operationCreateActivation, regT0);
683 emitStoreCell(dst, returnValueGPR);
684 emitStoreCell(scope, returnValueGPR);
685 }
686
687 void JIT::emit_op_get_scope(Instruction* currentInstruction)
688 {
689 int dst = currentInstruction[1].u.operand;
690 emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
691 loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
692 emitStoreCell(dst, regT0);
693 }
694
695 void JIT::emit_op_to_this(Instruction* currentInstruction)
696 {
697 WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
698 emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);
699
700 emitJumpSlowCaseIfNotJSCell(regT1);
701
702 addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
703 loadPtr(cachedStructure, regT2);
704 addSlowCase(branchTestPtr(Zero, regT2));
705 load32(Address(regT2, Structure::structureIDOffset()), regT2);
706 addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
707 }
708
709 void JIT::emit_op_create_this(Instruction* currentInstruction)
710 {
711 int callee = currentInstruction[2].u.operand;
712 WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
713 RegisterID calleeReg = regT0;
714 RegisterID rareDataReg = regT4;
715 RegisterID resultReg = regT0;
716 RegisterID allocatorReg = regT1;
717 RegisterID structureReg = regT2;
718 RegisterID cachedFunctionReg = regT4;
719 RegisterID scratchReg = regT3;
720
721 emitGetVirtualRegister(callee, calleeReg);
722 loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
723 addSlowCase(branchTestPtr(Zero, rareDataReg));
724 loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
725 loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
726 addSlowCase(branchTestPtr(Zero, allocatorReg));
727
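// If the profile has already seen multiple different callees it stores a sentinel and we skip the
// comparison; otherwise a callee that doesn't match the cached one takes the slow path, which can
// update the profile.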
728 loadPtr(cachedFunction, cachedFunctionReg);
729 Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
730 addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
731 hasSeenMultipleCallees.link(this);
732
733 emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
734 emitPutVirtualRegister(currentInstruction[1].u.operand);
735 }
736
737 void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
738 {
739 linkSlowCase(iter); // doesn't have rare data
740 linkSlowCase(iter); // doesn't have an allocation profile
741 linkSlowCase(iter); // allocation failed
742 linkSlowCase(iter); // cached function didn't match
743
744 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
745 slowPathCall.call();
746 }
747
748 void JIT::emit_op_check_tdz(Instruction* currentInstruction)
749 {
750 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
751 addSlowCase(branchTest64(Zero, regT0));
752 }
753
754 void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
755 {
756 linkSlowCase(iter);
757 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
758 slowPathCall.call();
759 }
760
761 void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
762 {
763 Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
764 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
765 callOperation(operationProfileWillCall, regT0);
766 profilerDone.link(this);
767 }
768
769 void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
770 {
771 Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
772 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
773 callOperation(operationProfileDidCall, regT0);
774 profilerDone.link(this);
775 }
776
777
778 // Slow cases
779
780 void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
781 {
782 linkSlowCase(iter);
783 linkSlowCase(iter);
784 linkSlowCase(iter);
785 linkSlowCase(iter);
786
787 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
788 slowPathCall.call();
789 }
790
791 void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
792 {
793 linkSlowCase(iter);
794
795 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
796 slowPathCall.call();
797 }
798
799 void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
800 {
801 linkSlowCase(iter);
802
803 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
804 slowPathCall.call();
805 }
806
807 void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
808 {
809 linkSlowCase(iter);
810 callOperation(operationConvertJSValueToBoolean, regT0);
811 emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), currentInstruction[2].u.operand); // inverted!
812 }
813
814 void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
815 {
816 linkSlowCase(iter);
817 callOperation(operationConvertJSValueToBoolean, regT0);
818 emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), currentInstruction[2].u.operand);
819 }
820
821 void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
822 {
823 linkSlowCase(iter);
824 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
825 slowPathCall.call();
826 }
827
828 void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
829 {
830 linkSlowCase(iter);
831 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
832 slowPathCall.call();
833 }
834
835 void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
836 {
837 linkSlowCase(iter);
838 callOperation(operationCompareEq, regT0, regT1);
839 emitTagAsBoolImmediate(returnValueGPR);
840 emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
841 }
842
843 void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
844 {
845 linkSlowCase(iter);
846 callOperation(operationCompareEq, regT0, regT1);
847 xor32(TrustedImm32(0x1), regT0);
848 emitTagAsBoolImmediate(returnValueGPR);
849 emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
850 }
851
852 void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
853 {
854 linkSlowCase(iter);
855 linkSlowCase(iter);
856 linkSlowCase(iter);
857 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
858 slowPathCall.call();
859 }
860
861 void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
862 {
863 linkSlowCase(iter);
864 linkSlowCase(iter);
865 linkSlowCase(iter);
866 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
867 slowPathCall.call();
868 }
869
870 void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
871 {
872 int dst = currentInstruction[1].u.operand;
873 int value = currentInstruction[2].u.operand;
874 int baseVal = currentInstruction[3].u.operand;
875
876 linkSlowCaseIfNotJSCell(iter, baseVal);
877 linkSlowCase(iter);
878 emitGetVirtualRegister(value, regT0);
879 emitGetVirtualRegister(baseVal, regT1);
880 callOperation(operationCheckHasInstance, dst, regT0, regT1);
881
882 emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
883 }
884
885 void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
886 {
887 int dst = currentInstruction[1].u.operand;
888 int value = currentInstruction[2].u.operand;
889 int proto = currentInstruction[3].u.operand;
890
891 linkSlowCaseIfNotJSCell(iter, value);
892 linkSlowCaseIfNotJSCell(iter, proto);
893 linkSlowCase(iter);
894 emitGetVirtualRegister(value, regT0);
895 emitGetVirtualRegister(proto, regT1);
896 callOperation(operationInstanceOf, dst, regT0, regT1);
897 }
898
899 void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
900 {
901 linkSlowCase(iter);
902
903 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
904 slowPathCall.call();
905 }
906
907 void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
908 {
909 linkSlowCase(iter); // Not JSCell.
910 linkSlowCase(iter); // Not JSString.
911
912 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
913 slowPathCall.call();
914 }
915
916 #endif // USE(JSVALUE64)
917
918 void JIT::emit_op_loop_hint(Instruction*)
919 {
920 // Emit the JIT optimization check:
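// The execute counter starts out negative and is incremented here; once it reaches zero or above,
// the slow path calls operationOptimize to ask whether an optimized (DFG) entry point is ready.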
921 if (canBeOptimized()) {
922 addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
923 AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
924 }
925
926 // Emit the watchdog timer check:
927 if (m_vm->watchdog && m_vm->watchdog->isEnabled())
928 addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog->timerDidFireAddress())));
929 }
930
931 void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
932 {
933 #if ENABLE(DFG_JIT)
934 // Emit the slow path for the JIT optimization check:
935 if (canBeOptimized()) {
936 linkSlowCase(iter);
937
938 callOperation(operationOptimize, m_bytecodeOffset);
939 Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
940 if (!ASSERT_DISABLED) {
941 Jump ok = branchPtr(MacroAssembler::Above, regT0, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
942 abortWithReason(JITUnreasonableLoopHintJumpTarget);
943 ok.link(this);
944 }
945 jump(returnValueGPR);
946 noOptimizedEntry.link(this);
947
948 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
949 }
950 #endif
951
952 // Emit the slow path of the watchdog timer check:
953 if (m_vm->watchdog && m_vm->watchdog->isEnabled()) {
954 linkSlowCase(iter);
955 callOperation(operationHandleWatchdogTimer);
956
957 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
958 }
959
960 }
961
962 void JIT::emit_op_new_regexp(Instruction* currentInstruction)
963 {
964 callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
965 }
966
967 void JIT::emit_op_new_func(Instruction* currentInstruction)
968 {
969 Jump lazyJump;
970 int dst = currentInstruction[1].u.operand;
971
972 #if USE(JSVALUE64)
973 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
974 #else
975 emitLoadPayload(currentInstruction[2].u.operand, regT0);
976 #endif
977 FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);
978 callOperation(operationNewFunction, dst, regT0, funcExec);
979 }
980
981 void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
982 {
983 Jump notUndefinedScope;
984 int dst = currentInstruction[1].u.operand;
985 #if USE(JSVALUE64)
986 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
987 notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
988 store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
989 #else
990 emitLoadPayload(currentInstruction[2].u.operand, regT0);
991 notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
992 emitStore(dst, jsUndefined());
993 #endif
994
995 Jump done = jump();
996 notUndefinedScope.link(this);
997
998 FunctionExecutable* funcExpr = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
999 callOperation(operationNewFunction, dst, regT0, funcExpr);
1000 done.link(this);
1001 }
1002
1003 void JIT::emit_op_new_array(Instruction* currentInstruction)
1004 {
1005 int dst = currentInstruction[1].u.operand;
1006 int valuesIndex = currentInstruction[2].u.operand;
1007 int size = currentInstruction[3].u.operand;
1008 addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
1009 callOperation(operationNewArrayWithProfile, dst,
1010 currentInstruction[4].u.arrayAllocationProfile, regT0, size);
1011 }
1012
1013 void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
1014 {
1015 int dst = currentInstruction[1].u.operand;
1016 int sizeIndex = currentInstruction[2].u.operand;
1017 #if USE(JSVALUE64)
1018 emitGetVirtualRegister(sizeIndex, regT0);
1019 callOperation(operationNewArrayWithSizeAndProfile, dst,
1020 currentInstruction[3].u.arrayAllocationProfile, regT0);
1021 #else
1022 emitLoad(sizeIndex, regT1, regT0);
1023 callOperation(operationNewArrayWithSizeAndProfile, dst,
1024 currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
1025 #endif
1026 }
1027
1028 void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
1029 {
1030 int dst = currentInstruction[1].u.operand;
1031 int valuesIndex = currentInstruction[2].u.operand;
1032 int size = currentInstruction[3].u.operand;
1033 const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
1034 callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
1035 }
1036
1037 #if USE(JSVALUE64)
1038 void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
1039 {
1040 int dst = currentInstruction[1].u.operand;
1041 int base = currentInstruction[2].u.operand;
1042 int enumerator = currentInstruction[4].u.operand;
1043
1044 emitGetVirtualRegister(base, regT0);
1045 emitGetVirtualRegister(enumerator, regT1);
1046 emitJumpSlowCaseIfNotJSCell(regT0, base);
1047
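// If the base still has the exact structure cached by the enumerator, the property being iterated
// is guaranteed to be present, so the result is simply true; a structure mismatch falls back to
// the generic slow path.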
1048 load32(Address(regT0, JSCell::structureIDOffset()), regT0);
1049 addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));
1050
1051 move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
1052 emitPutVirtualRegister(dst);
1053 }
1054
1055 void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
1056 {
1057 Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
1058
1059 PatchableJump badType;
1060
1061 // FIXME: Add support for other types like TypedArrays and Arguments.
1062 // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
1063 JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
1064 move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
1065 Jump done = jump();
1066
1067 LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
1068
1069 patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1070 patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1071
1072 patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
1073
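// Install the finished stub: repoint the original bad-type jump at it, and redirect the slow-path
// caller to the fully generic operation so repeated misses stop triggering recompilation.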
1074 byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
1075 m_codeBlock, patchBuffer,
1076 ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
1077
1078 RepatchBuffer repatchBuffer(m_codeBlock);
1079 repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
1080 repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(operationHasIndexedPropertyGeneric));
1081 }
1082
1083 void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
1084 {
1085 int dst = currentInstruction[1].u.operand;
1086 int base = currentInstruction[2].u.operand;
1087 int property = currentInstruction[3].u.operand;
1088 ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
1089
1090 emitGetVirtualRegisters(base, regT0, property, regT1);
1091
1092 // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
1093 // We check the value as if it were a uint32 against m_vectorLength - which will always fail if the
1094 // number was negative, since m_vectorLength is always less than intmax (because the total allocation
1095 // size is always less than 4Gb). As such, zero-extending will have been correct (and extending the value
1096 // to 64 bits is necessary since it's used in the address calculation). We zero-extend rather than sign-
1097 // extend since it makes it easier to re-tag the value in the slow case.
1098 zeroExtend32ToPtr(regT1, regT1);
1099
1100 emitJumpSlowCaseIfNotJSCell(regT0, base);
1101 emitArrayProfilingSiteWithCell(regT0, regT2, profile);
1102 and32(TrustedImm32(IndexingShapeMask), regT2);
1103
1104 JITArrayMode mode = chooseArrayMode(profile);
1105 PatchableJump badType;
1106
1107 // FIXME: Add support for other types like TypedArrays and Arguments.
1108 // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
1109 JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);
1110
1111 move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
1112
1113 addSlowCase(badType);
1114 addSlowCase(slowCases);
1115
1116 Label done = label();
1117
1118 emitPutVirtualRegister(dst);
1119
1120 m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
1121 }
1122
1123 void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1124 {
1125 int dst = currentInstruction[1].u.operand;
1126 int base = currentInstruction[2].u.operand;
1127 int property = currentInstruction[3].u.operand;
1128 ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
1129
1130 linkSlowCaseIfNotJSCell(iter, base); // base cell check
1131 linkSlowCase(iter); // base array check
1132 linkSlowCase(iter); // vector length check
1133 linkSlowCase(iter); // empty value
1134
1135 Label slowPath = label();
1136
1137 emitGetVirtualRegister(base, regT0);
1138 emitGetVirtualRegister(property, regT1);
1139 Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, profile);
1140
1141 m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
1142 m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
1143 m_byValInstructionIndex++;
1144 }
1145
1146 void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
1147 {
1148 int dst = currentInstruction[1].u.operand;
1149 int base = currentInstruction[2].u.operand;
1150 int index = currentInstruction[4].u.operand;
1151 int enumerator = currentInstruction[5].u.operand;
1152
1153 // Check that base is a cell
1154 emitGetVirtualRegister(base, regT0);
1155 emitJumpSlowCaseIfNotJSCell(regT0, base);
1156
1157 // Check the structure
1158 emitGetVirtualRegister(enumerator, regT2);
1159 load32(Address(regT0, JSCell::structureIDOffset()), regT1);
1160 addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));
1161
1162 // Compute the offset
1163 emitGetVirtualRegister(index, regT1);
1164 // If index is less than the enumerator's cached inline storage, then it's an inline access
1165 Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
1166 addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
1167 signExtend32ToPtr(regT1, regT1);
1168 load64(BaseIndex(regT0, regT1, TimesEight), regT0);
1169
1170 Jump done = jump();
1171
1172 // Otherwise it's out of line
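// Out-of-line properties live at negative offsets from the butterfly pointer, so the
// (index - inlineCapacity) delta is negated before being used as the index.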
1173 outOfLineAccess.link(this);
1174 loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
1175 sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
1176 neg32(regT1);
1177 signExtend32ToPtr(regT1, regT1);
1178 int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
1179 load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);
1180
1181 done.link(this);
1182 emitValueProfilingSite();
1183 emitPutVirtualRegister(dst, regT0);
1184 }
1185
1186 void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1187 {
1188 int base = currentInstruction[2].u.operand;
1189 linkSlowCaseIfNotJSCell(iter, base);
1190 linkSlowCase(iter);
1191
1192 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
1193 slowPathCall.call();
1194 }
1195
1196 void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
1197 {
1198 int dst = currentInstruction[1].u.operand;
1199 int enumerator = currentInstruction[2].u.operand;
1200 int index = currentInstruction[3].u.operand;
1201
1202 emitGetVirtualRegister(index, regT0);
1203 emitGetVirtualRegister(enumerator, regT1);
1204 Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));
1205
1206 move(TrustedImm64(JSValue::encode(jsNull())), regT0);
1207
1208 Jump done = jump();
1209 inBounds.link(this);
1210
1211 loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
1212 signExtend32ToPtr(regT0, regT0);
1213 load64(BaseIndex(regT1, regT0, TimesEight), regT0);
1214
1215 done.link(this);
1216 emitPutVirtualRegister(dst);
1217 }
1218
1219 void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
1220 {
1221 int dst = currentInstruction[1].u.operand;
1222 int enumerator = currentInstruction[2].u.operand;
1223 int index = currentInstruction[3].u.operand;
1224
1225 emitGetVirtualRegister(index, regT0);
1226 emitGetVirtualRegister(enumerator, regT1);
1227 Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));
1228
1229 move(TrustedImm64(JSValue::encode(jsNull())), regT0);
1230
1231 Jump done = jump();
1232 inBounds.link(this);
1233
1234 loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
1235 signExtend32ToPtr(regT0, regT0);
1236 load64(BaseIndex(regT1, regT0, TimesEight), regT0);
1237
1238 done.link(this);
1239 emitPutVirtualRegister(dst);
1240 }
1241
1242 void JIT::emit_op_profile_type(Instruction* currentInstruction)
1243 {
1244 TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
1245 int valueToProfile = currentInstruction[1].u.operand;
1246
1247 emitGetVirtualRegister(valueToProfile, regT0);
1248
1249 JumpList jumpToEnd;
1250
1251 // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
1252 // These typechecks are inlined to match those of the 64-bit JSValue type checks.
1253 if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
1254 jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
1255 else if (cachedTypeLocation->m_lastSeenType == TypeNull)
1256 jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
1257 else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
1258 move(regT0, regT1);
1259 and64(TrustedImm32(~1), regT1);
1260 jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
1261 } else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
1262 jumpToEnd.append(emitJumpIfImmediateInteger(regT0));
1263 else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
1264 jumpToEnd.append(emitJumpIfImmediateNumber(regT0));
1265 else if (cachedTypeLocation->m_lastSeenType == TypeString) {
1266 Jump isNotCell = emitJumpIfNotJSCell(regT0);
1267 jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
1268 isNotCell.link(this);
1269 }
1270
1271 // Load the type profiling log into T2.
1272 TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
1273 move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
1274 // Load the next log entry into T1.
1275 loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);
1276
1277 // Store the JSValue onto the log entry.
1278 store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));
1279
1280 // Store the StructureID of the cell if regT0 is a cell; otherwise store 0 in the log entry.
1281 Jump notCell = emitJumpIfNotJSCell(regT0);
1282 load32(Address(regT0, JSCell::structureIDOffset()), regT0);
1283 store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
1284 Jump skipIsCell = jump();
1285 notCell.link(this);
1286 store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
1287 skipIsCell.link(this);
1288
1289 // Store the typeLocation on the log entry.
1290 move(TrustedImmPtr(cachedTypeLocation), regT0);
1291 store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));
1292
1293 // Increment the current log entry.
1294 addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
1295 store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
1296 Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
1297 // Clear the log if we're at the end of the log.
1298 callOperation(operationProcessTypeProfilerLog);
1299 skipClearLog.link(this);
1300
1301 jumpToEnd.link(this);
1302 }
1303
1304 #endif // USE(JSVALUE64)
1305
1306 void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
1307 {
1308 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
1309 slowPathCall.call();
1310 }
1311
1312 void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1313 {
1314 linkSlowCase(iter);
1315 linkSlowCase(iter);
1316
1317 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
1318 slowPathCall.call();
1319 }
1320
1321 void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
1322 {
1323 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
1324 slowPathCall.call();
1325 }
1326
1327 void JIT::emit_op_get_property_enumerator(Instruction* currentInstruction)
1328 {
1329 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_property_enumerator);
1330 slowPathCall.call();
1331 }
1332
1333 void JIT::emit_op_to_index_string(Instruction* currentInstruction)
1334 {
1335 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
1336 slowPathCall.call();
1337 }
1338
1339 void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
1340 {
1341 BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
1342 if (!basicBlockLocation->hasExecuted())
1343 basicBlockLocation->emitExecuteCode(*this, regT1);
1344 }
1345
1346 void JIT::emit_op_create_direct_arguments(Instruction* currentInstruction)
1347 {
1348 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_direct_arguments);
1349 slowPathCall.call();
1350 }
1351
1352 void JIT::emit_op_create_scoped_arguments(Instruction* currentInstruction)
1353 {
1354 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_scoped_arguments);
1355 slowPathCall.call();
1356 }
1357
1358 void JIT::emit_op_create_out_of_band_arguments(Instruction* currentInstruction)
1359 {
1360 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_out_of_band_arguments);
1361 slowPathCall.call();
1362 }
1363
1364 } // namespace JSC
1365
1366 #endif // ENABLE(JIT)