/*
 * Copyright (C) 2009, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "JSVariableObject.h"
#include "LinkBuffer.h"

namespace JSC {

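// A quick orientation for this file: with USE(JSVALUE32_64), a JSValue is a
// 64-bit tag/payload pair (u.asBits.tag and u.asBits.payload below). Cells,
// booleans, int32s, null, undefined and empty values each carry a distinct
// 32-bit tag; a double is stored as its raw 64 bits and is recognized by its
// high word being Below JSValue::LowestTag (see the "doubles" slow cases
// below). The ASSERTs in emit_op_jfalse/jtrue and emit_op_jeq_null/jneq_null
// document the tag-ordering tricks the fast paths rely on.
//
// privateCompileCTINativeCall builds the thunk used to call a host (native)
// function: it fills in the callee frame header, makes the call using the
// per-architecture calling convention, then checks vm->exception and either
// returns normally or tails into ctiVMThrowTrampoline.
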
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction func)
{
    Call nativeCall;

    emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock);
    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, JSStack::ReturnPC);

    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, JSStack::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, JSStack::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, JSStack::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    breakpoint();
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&vm->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&vm->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_vm->topCallFrame);
    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_vm, this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return FINALIZE_CODE(patchBuffer, ("JIT CTI native call"));
}

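// The simple opcodes below follow a common pattern: emitLoad() brings a
// virtual register's tag into one GPR and its payload into another,
// emitStore() writes a tag/payload pair back to the stack, and map() records
// that the destination is still live in {regT1, regT0} so the next opcode can
// reuse the registers without reloading.
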
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, JSStack::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

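// op_new_object allocates the object inline when possible: the MarkedAllocator
// for the profiled structure's size is baked in as an immediate, and
// emitAllocateJSObject() allocates straight out of that allocator's free
// list; emitSlow_op_new_object calls the cti_op_new_object stub when the
// inline allocation fails.
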
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_object);
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.objectAllocationProfile->structure()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[3].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

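// instanceof: having already ruled out non-default hasInstance behavior in
// op_check_has_instance, we can compute the result by walking value's
// prototype chain looking for proto. Each Structure caches its prototype
// value, so each step of the walk is a structure load, a prototype payload
// load, and a compare.
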
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    // Load the operands into registers.
    // regT0 is kept free here, since we will use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.call(dst);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();

    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
    emitStoreBool(dst, regT0);
}

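// op_is_number exploits the tag layout: Int32Tag is the all-ones tag (the
// ASSERTs in emit_op_jfalse/jtrue record that Int32Tag + 1 == 0), and double
// values use tags strictly below LowestTag. Adding 1 wraps Int32Tag to zero
// and shifts every double tag to at most LowestTag, so a single unsigned
// Below comparison against LowestTag + 1 accepts exactly the numbers.
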
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branch32(Equal, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation);
    stubCall.call();
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(arguments)), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments));
    stubCall.addArgument(activation);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

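// op_jfalse/op_jtrue fast path: the ASSERT below records that BooleanTag and
// Int32Tag are the two highest tag values, so one unsigned Below comparison
// against BooleanTag rejects everything that is neither boolean nor int32.
// For both kinds a zero payload means false, so branchTest32 on the payload
// decides the jump. The slow path retries inline for doubles, then calls
// cti_op_jtrue (inverting the result for jfalse).
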
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

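// op_jeq_null/op_jneq_null use a tag trick for the immediate cases: the
// ASSERT below records that UndefinedTag + 1 == NullTag and that NullTag is
// odd, so or32(1, tag) maps both undefined and null to NullTag and a single
// compare tests "null or undefined". Cells additionally honor the
// MasqueradesAsUndefined quirk, which only applies within the structure's
// original global object.
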
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

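// op_eq/op_neq fast path: once the tags are equal and the value is neither a
// cell nor a double, payload equality is value equality. Cells bail out
// because strings need a deep compare; doubles bail out because bit equality
// is not numeric equality (NaN != NaN, and an int32 never shares a tag with a
// numerically equal double). The slow path sends string-vs-string to
// cti_op_eq_strings and everything else to cti_op_eq.
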
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), regT0);
    emitStoreBool(dst, regT0);
}

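// Strict equality can be decided by payload comparison alone once we know the
// tags match, the operands are not doubles, and they are not both strings:
// for cells the payload is the cell pointer, and a string/non-string pair of
// cells can never be strictly equal, so only the string/string case needs the
// stub call.
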
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings.
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstNotString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    notCell.link(this);
    firstNotString.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

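// op_get_pnames sets up a for-in loop: it ensures the base is an object
// (calling cti_to_object for other non-null/undefined values, or skipping the
// loop entirely via breakTarget for null and undefined), then asks the stub
// for a JSPropertyNameIterator and initializes the loop counter i and the
// bound size as boxed int32s.
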
void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(TrustedImm32(Int32Tag), intTagFor(i));
    store32(TrustedImm32(0), intPayloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, payloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

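// op_next_pname produces the next enumerated key. The iterator caches the
// base's Structure and prototype chain; as long as both still match, the
// cached name at index i is known to be a valid property and the loop
// continues directly. If either check fails, cti_has_property re-validates
// the key the slow way before it is surfaced.
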
void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(payloadFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    loadPtr(payloadFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
    loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_with_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_number), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_number);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_name_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
    stubCall.addArgument(currentInstruction[2].u.operand);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call();
}

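// op_catch runs at the start of an exception handler. cti_op_throw has
// already unwound to the handler's frame and returns that frame pointer; here
// we fetch the pending exception out of the VM, clear the VM's exception slot
// by storing the empty value's tag and payload, and store the exception into
// the handler's destination register.
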
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // cti_op_throw returns the callFrame for the handler.
    move(regT0, callFrameRegister);

    // Now store the exception returned by cti_op_throw.
    loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, vm)), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    unsigned message = currentInstruction[1].u.operand;

    JITStubCall stubCall(this, cti_op_throw_static_error);
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[4].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_enter(Instruction*)
{
    emitEnterOptimizationCheck();

    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;

    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall(this, cti_op_push_activation).call(activation);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));

    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(dst, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);

    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitStore(dst, JSValue());
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

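// op_create_this allocates the |this| object for a constructor call using the
// callee's ObjectAllocationProfile: if the profile holds an allocator, we can
// allocate inline with the profiled structure. A null allocator (profile not
// yet populated) or a failed inline allocation takes the slow path to
// cti_op_create_this, as the two linkSlowCase comments below note.
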
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitLoadPayload(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    if (shouldEmitProfiling()) {
        loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
        move(regT3, regT1);
        emitValueProfilingSite();
    }
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalThis();
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    if (shouldEmitProfiling()) {
        move(TrustedImm32(JSValue::UndefinedTag), regT1);
        move(TrustedImm32(0), regT0);
    }
    Jump isNotUndefined = branch32(NotEqual, regT3, TrustedImm32(JSValue::UndefinedTag));
    emitValueProfilingSite();
    move(TrustedImmPtr(globalThis), regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitStore(thisRegister, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    linkSlowCase(iter);
    if (shouldEmitProfiling()) {
        move(TrustedImm32(JSValue::CellTag), regT1);
        move(TrustedImmPtr(m_vm->stringStructure.get()), regT0);
    }
    isNotUndefined.link(this);
    emitValueProfilingSite();
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT3, regT2);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    load32(payloadFor(JSStack::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    emitStoreInt32(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.call(dst);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including this
    load32(payloadFor(JSStack::ArgumentCount), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

    neg32(regT2);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT1);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(arguments, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(arguments);
    stubCall.addArgument(property);
    stubCall.callWithValueProfiling(dst);
}

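// op_put_to_base stores a value through a resolved base. The
// PutToBaseOperation records what kind of base was resolved: a global
// variable (store straight to its register address; the Checked variant first
// tests a watchpoint predicate and falls through into the unchecked store), a
// scope variable (store into the scope's register array), or a global
// property (structure-checked store into the butterfly). Anything
// uninitialised, read-only or generic goes through cti_op_put_to_base.
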
void JIT::emit_op_put_to_base(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int id = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    PutToBaseOperation* operation = currentInstruction[4].u.putToBaseOperation;

    switch (operation->m_kind) {
    case PutToBaseOperation::GlobalVariablePutChecked:
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(operation->m_predicatePointer)));
    case PutToBaseOperation::GlobalVariablePut: {
        JSGlobalObject* globalObject = m_codeBlock->globalObject();
        if (operation->m_isDynamic)
            addSlowCase(branchPtr(NotEqual, payloadFor(base), TrustedImmPtr(globalObject)));

        emitLoad(value, regT1, regT0);
        storePtr(regT0, reinterpret_cast<char*>(operation->m_registerAddress) + OBJECT_OFFSETOF(JSValue, u.asBits.payload));
        storePtr(regT1, reinterpret_cast<char*>(operation->m_registerAddress) + OBJECT_OFFSETOF(JSValue, u.asBits.tag));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        break;
    }
    case PutToBaseOperation::VariablePut: {
        loadPtr(payloadFor(base), regT3);
        emitLoad(value, regT1, regT0);
        loadPtr(Address(regT3, JSVariableObject::offsetOfRegisters()), regT2);
        store32(regT0, Address(regT2, operation->m_offset * sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        store32(regT1, Address(regT2, operation->m_offset * sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(regT3, regT1, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        break;
    }

    case PutToBaseOperation::GlobalPropertyPut: {
        JSGlobalObject* globalObject = m_codeBlock->globalObject();
        loadPtr(payloadFor(base), regT3);
        emitLoad(value, regT1, regT0);
        loadPtr(&operation->m_structure, regT2);
        addSlowCase(branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), regT2));
        ASSERT(!operation->m_structure || !operation->m_structure->inlineCapacity());
        loadPtr(Address(regT3, JSObject::butterflyOffset()), regT2);
        load32(&operation->m_offsetInButterfly, regT3);
        storePtr(regT0, BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        storePtr(regT1, BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(globalObject, regT1, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        break;
    }

    case PutToBaseOperation::Uninitialised:
    case PutToBaseOperation::Readonly:
    case PutToBaseOperation::Generic:
        JITStubCall stubCall(this, cti_op_put_to_base);

        stubCall.addArgument(TrustedImm32(base));
        stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(id)));
        stubCall.addArgument(TrustedImm32(value));
        stubCall.addArgument(TrustedImmPtr(operation));
        stubCall.call();
        break;
    }
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)