// jit/JITOpcodes32_64.cpp (JavaScriptCore-7600.1.4.16.1)
/*
 * Copyright (C) 2009, 2012, 2013, 2014 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CCallHelpers.h"
#include "Debugger.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "JSVariableObject.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "VirtualRegister.h"

namespace JSC {
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction func)
{
    Call nativeCall;

    emitFunctionPrologue();
    emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock);
    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetCallerFrameFromCallFrameHeaderPtr(regT0);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(8), stackPointerRegister); // Align stack for call.
    storePtr(X86Registers::ecx, Address(stackPointerRegister));

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(8), stackPointerRegister);

#elif CPU(ARM) || CPU(SH4) || CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can get to its global data.
    emitGetCallerFrameFromCallFrameHeaderPtr(regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

#if CPU(MIPS)
    // Allocate stack space for (unused) 16 bytes (8-byte aligned) for 4 arguments.
    subPtr(TrustedImm32(16), stackPointerRegister);
#endif

    // Calling convention is f(argumentGPR0, argumentGPR1, ...).
    // Host function signature is f(ExecState*).
    move(callFrameRegister, argumentGPR0);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, argumentGPR1);
    loadPtr(Address(argumentGPR1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

#if CPU(MIPS)
    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);
#endif

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    abortWithReason(JITNotSupported);
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(vm->addressOfException()) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    emitFunctionEpilogue();
    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    addPtr(TrustedImm32(-4), stackPointerRegister);
    loadPtr(Address(callFrameRegister), X86Registers::ecx);
    push(X86Registers::ecx);
#else
    loadPtr(Address(callFrameRegister), argumentGPR0);
#endif
    move(TrustedImmPtr(FunctionPtr(operationVMHandleException).value()), regT3);
    call(regT3);

#if CPU(X86)
    addPtr(TrustedImm32(8), stackPointerRegister);
#endif

    jumpToExceptionHandler();

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_vm, *this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return FINALIZE_CODE(patchBuffer, ("JIT CTI native call"));
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
    }
}

void JIT::emit_op_captured_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

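    // Unlike op_mov, a write to a captured variable also notifies the
    // variable's WatchpointSet (operand 3), so that code which assumed the
    // variable was constant can be invalidated (the usual role of a
    // WatchpointSet).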
    emitLoad(src, regT1, regT0);
    emitNotifyWrite(regT1, regT0, regT2, currentInstruction[3].u.watchpointSet);
    emitStore(dst, regT1, regT0);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueGPR != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

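    // Fast path: emitAllocateJSObject pops a cell off the MarkedAllocator's
    // free list inline and registers a slow case (taken when the free list
    // is empty) that lands in emitSlow_op_new_object below.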
    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands into registers.
    // regT0 is deliberately left free here: it will hold the result below.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoad(baseVal, regT3, regT2);
    callOperation(operationCheckHasInstance, dst, regT1, regT0, regT3, regT2);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoad(proto, regT3, regT2);
    callOperation(operationInstanceOf, dst, regT1, regT0, regT3, regT2);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();

    isCell.link(this);
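    // A cell only compares equal to undefined if it masquerades as undefined
    // (e.g. document.all), and then only when observed from within its own
    // global object, which is why the structure's global object is compared
    // against ours below.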
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
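    // Tag trick: Int32Tag is -1, so adding 1 wraps it to 0, while a double's
    // high word, already unsigned-below LowestTag, stays below LowestTag + 1
    // after the increment. A single unsigned compare therefore accepts
    // exactly int32s and doubles.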
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branch32(Equal, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    emitLoadPayload(activation, regT0);
    callOperation(operationTearOffActivation, regT0);
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    VirtualRegister arguments = VirtualRegister(currentInstruction[1].u.operand);
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(arguments).offset()), TrustedImm32(JSValue::EmptyValueTag));
    emitLoadPayload(unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset(), regT0);
    emitLoadPayload(activation, regT1);
    callOperation(operationTearOffArguments, regT0, regT1);
    argsNotCreated.link(this);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

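    // The fast path covers exactly booleans and int32s: per the assert below
    // they hold the two highest tag values, so anything unsigned-below
    // BooleanTag is punted to the slow case. For both types a zero payload
    // means false.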
    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
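    // NullTag is odd and UndefinedTag == NullTag - 1 (asserted above), so
    // or-ing 1 into the tag folds undefined into null and a single compare
    // catches both values.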
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
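    // Three slow cases, consumed in this order by emitSlow_op_eq below: the
    // tags differ, both operands are cells (possibly string == string), or
    // the tags say double.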
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    emitLoad(op1, regT1, regT0);
    emitLoad(op2, regT3, regT2);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    emitStoreBool(dst, returnValueGPR);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), returnValueGPR);
    emitStoreBool(dst, returnValueGPR);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings.
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstNotString = branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    notCell.link(this);
    firstNotString.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperationNoExceptionCheck(operationThrow, regT1, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    if (VirtualRegister(base) != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode())
        isNotObject.append(emitJumpIfCellNotObject(regT0));

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    callOperation(operationGetPNames, regT0);
    emitStoreCell(dst, returnValueGPR);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(TrustedImm32(Int32Tag), intTagFor(i));
    store32(TrustedImm32(0), intPayloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, payloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
    callOperation(operationToObject, base, regT1, regT0);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(payloadFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    loadPtr(payloadFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
    loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    callOperation(operationHasProperty, regT0, regT1);

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationPushWithScope, regT1, regT0);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    callOperation(operationPopScope);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
    callOperation(operationPushNameScope, &m_codeBlock->identifier(currentInstruction[1].u.operand), regT1, regT0, currentInstruction[3].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    move(TrustedImmPtr(m_vm), regT3);
    // operationThrow returns the callFrame for the handler.
    load32(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    // Now store the exception returned by operationThrow.
    load32(Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
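    // The exception has been read into regT1:regT0; clear vm->exception by
    // overwriting it with the empty value.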
    store32(TrustedImm32(JSValue().payload()), Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    emitLoad(m_codeBlock->getConstant(currentInstruction[1].u.operand), regT1, regT0);
    callOperation(operationThrowStaticError, regT1, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_enter(Instruction* currentInstruction)
{
    emitEnterOptimizationCheck();

    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(virtualRegisterForLocal(i).offset(), jsUndefined());

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_enter);
    slowPathCall.call();
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;

    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    callOperation(operationCreateActivation, 0);
    emitStoreCell(activation, returnValueGPR);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
    callOperation(operationCreateArguments);
    emitStoreCell(dst, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(dst)).offset(), returnValueGPR);
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    emitStore(dst, JSValue());
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int result = currentInstruction[1].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[2].u.jsCell;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);

    loadPtr(cachedFunction, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));

    move(TrustedImm32(JSValue::CellTag), regT1);
    emitStore(result, regT1, regT0);
}

void JIT::emitSlow_op_get_callee(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_callee);
    slowPathCall.call();
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitLoadPayload(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));
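    // The slow case just added fires when the allocator is null, i.e. the
    // callee's ObjectAllocationProfile has not been initialized yet;
    // slow_path_create_this handles that.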

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    int thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT0);
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));
}

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileWillCall, regT1, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileDidCall, regT1, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    load32(payloadFor(JSStack::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    emitStoreInt32(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    callOperation(operationGetArgumentsLength, dst, base);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including this
    load32(payloadFor(JSStack::ArgumentCount), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

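    // The argument is read straight off the call frame: arguments live
    // contiguously above 'this' as 8-byte Registers (payload and tag words),
    // so indexing by regT2 (property + 1) from thisArgumentOffset() lands on
    // the requested argument.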
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT1);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);

    callOperation(operationCreateArguments);
    emitStoreCell(arguments, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset(), returnValueGPR);

    skipArgumentsCreation.link(this);
    emitLoad(arguments, regT1, regT0);
    emitLoad(property, regT3, regT2);
    callOperation(WithProfile, operationGetByValGeneric, dst, regT1, regT0, regT3, regT2);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)