/*
 * Copyright (C) 2009, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "CopiedSpaceInlines.h"
#include "Heap.h"
#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE64)
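
// A note on the value representation assumed by the fast paths below (the
// standard JSVALUE64 NaN-boxing scheme; constants listed for orientation
// only - see the JSValue headers for the authoritative encoding):
//   - Numbers carry the TagTypeNumber bits, kept in tagTypeNumberRegister.
//   - Booleans are the immediates ValueFalse (0x06) and ValueTrue (0x07),
//     which differ only in their low bit.
//   - undefined is 0x0A and null is 0x02; masking out TagBitUndefined (0x08)
//     maps undefined onto null, which is how the jeq_null/jneq_null and
//     eq_null/neq_null paths fold the two cases together.
//   - Cells (objects, strings) are raw pointers with no tag bits set.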

JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (canBeOptimizedOrInlined()) {
        // Use the simpler approach, since the DFG thinks that the last result register
        // is always set to the destination on every operation.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        if (m_codeBlock->isConstantRegisterIndex(src)) {
            if (!getConstantOperand(src).isNumber())
                store64(TrustedImm64(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            else
                store64(Imm64(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            if (dst == m_lastResultBytecodeRegister)
                killLastResultRegister();
        } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
            // If either the src or dst is the cached register, go through the
            // get/put registers to make sure we track this correctly.
            emitGetVirtualRegister(src, regT0);
            emitPutVirtualRegister(dst);
        } else {
            // Perform the copy via regT1; do not disturb any mapping in regT0.
            load64(Address(callFrameRegister, src * sizeof(Register)), regT1);
            store64(regT1, Address(callFrameRegister, dst * sizeof(Register)));
        }
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, JSStack::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_object);
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.objectAllocationProfile->structure()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    // Load the operands value and proto into registers. baseVal has already been
    // consumed by op_check_has_instance, which leaves regT0 free for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop, regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an object. The result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
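    // The value is a cell. Plain objects are never undefined, but a cell whose
    // structure has the MasqueradesAsUndefined flag (e.g. document.all) compares
    // equal to undefined when observed from within its own global object, so
    // check the flag and then compare global objects.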
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
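    // XORing with ValueFalse maps the two boolean immediates onto 0 and 1; for
    // any non-boolean, some bit outside the low bit survives. Testing against
    // ~1 therefore answers "is a boolean" (the usual JSVALUE64 trick; see the
    // immediate encodings noted at the top of this section).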
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
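    // Under NaN boxing, every number (immediate integer or double) has at
    // least one of the TagTypeNumber bits set, and nothing else does - so a
    // single mask against tagTypeNumberRegister decides "is a number".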
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branchTest64(Zero, addressFor(activation));
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.call();
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branchTest64(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(arguments))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.addArgument(activation, regT2);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in returnValueRegister (%eax on x86).
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in returnValueRegister if it is an object.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Otherwise, return 'this'.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
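    // A concrete walk-through with the usual immediates (ValueFalse == 0x06,
    // ValueTrue == 0x07):
    //   input true  (0x07): 0x07 ^ 0x06 = 0x01; 0x01 ^ 0x07 = 0x06 (false)
    //   input false (0x06): 0x06 ^ 0x06 = 0x00; 0x00 ^ 0x07 = 0x07 (true)
    // Any other input leaves bits set outside the low bit after the first
    // XOR, which the branchTestPtr below routes to the slow case.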
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check the MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check the MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
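    // Fast path: only immediate-integer pairs are compared inline; anything
    // else (doubles, cells, mixed tags) falls through to the slow case stub.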
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store64(tagTypeNumberRegister, addressFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    load64(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));

    // Test base's prototype chain
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain)), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_with_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_name_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call();
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
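    // On entry here the throw machinery has already unwound to the catching
    // frame; by the convention assumed here, that CallFrame* arrives in regT0.
    // We adopt it as our frame, then fetch and clear the VM's pending exception.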
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, vm) / sizeof(void*));
    load64(Address(regT3, OBJECT_OFFSETOF(VM, exception)), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(VM, exception)));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw_static_error);
    if (!m_codeBlock->getConstant(currentInstruction[1].u.operand).isNumber())
        stubCall.addArgument(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    else
        stubCall.addArgument(Imm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[4].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    emitEnterOptimizationCheck();

    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    emitPutVirtualRegister(dst);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

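    // Writing a raw zero stores the empty JSValue, the sentinel that the
    // branchTest64(Zero/NonZero, ...) checks above use to mean "not yet
    // materialized" for lazily created registers such as arguments.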
    store64(TrustedImm64((int64_t)0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);
    if (shouldEmitProfiling()) {
        loadPtr(Address(regT1, JSCell::structureOffset()), regT0);
        emitValueProfilingSite();
    }
    addSlowCase(branchPtr(Equal, Address(regT1, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(result);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
}

// Slow cases

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalThis();

    linkSlowCase(iter);
    if (shouldEmitProfiling())
        move(TrustedImm64(JSValue::encode(jsUndefined())), regT0);
    Jump isNotUndefined = branch64(NotEqual, regT1, TrustedImm64(JSValue::encode(jsUndefined())));
    emitValueProfilingSite();
    move(TrustedImm64(JSValue::encode(JSValue(static_cast<JSCell*>(globalThis)))), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    linkSlowCase(iter);
    if (shouldEmitProfiling())
        move(TrustedImm64(JSValue::encode(m_vm->stringStructure.get())), regT0);
    isNotUndefined.link(this);
    emitValueProfilingSite();
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call(dst);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_number);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this.
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

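    // The actual arguments live at negative offsets from the current call
    // frame, so we negate the 1-based index and index backwards from
    // thisArgumentOffset(). (A sketch of the frame layout; see CallFrame for
    // the authoritative offsets.)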
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.callWithValueProfiling(dst);
}

void JIT::emit_op_put_to_base(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int id = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    PutToBaseOperation* operation = currentInstruction[4].u.putToBaseOperation;
    switch (operation->m_kind) {
    case PutToBaseOperation::GlobalVariablePutChecked:
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(operation->m_predicatePointer)));
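        // Fall through: once the predicate check has passed, a checked put
        // proceeds exactly like an unchecked one.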
    case PutToBaseOperation::GlobalVariablePut: {
        JSGlobalObject* globalObject = m_codeBlock->globalObject();
        if (operation->m_isDynamic) {
            emitGetVirtualRegister(base, regT0);
            addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(globalObject)));
        }
        emitGetVirtualRegister(value, regT0);
        store64(regT0, operation->m_registerAddress);
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        return;
    }
    case PutToBaseOperation::VariablePut: {
        emitGetVirtualRegisters(base, regT0, value, regT1);
        loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT2);
        store64(regT1, Address(regT2, operation->m_offset * sizeof(Register)));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(regT0, regT1, regT2, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        return;
    }

    case PutToBaseOperation::GlobalPropertyPut: {
        emitGetVirtualRegisters(base, regT0, value, regT1);
        loadPtr(&operation->m_structure, regT2);
        addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), regT2));
        ASSERT(!operation->m_structure || !operation->m_structure->inlineCapacity());
        loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
        load32(&operation->m_offsetInButterfly, regT3);
        signExtend32ToPtr(regT3, regT3);
        store64(regT1, BaseIndex(regT2, regT3, TimesEight));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(regT0, regT1, regT2, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        return;
    }

    case PutToBaseOperation::Uninitialised:
    case PutToBaseOperation::Readonly:
    case PutToBaseOperation::Generic:
        JITStubCall stubCall(this, cti_op_put_to_base);

        stubCall.addArgument(TrustedImm32(base));
        stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(id)));
        stubCall.addArgument(TrustedImm32(value));
        stubCall.addArgument(TrustedImmPtr(operation));
        stubCall.call();
        return;
    }
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
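    // The execute counter counts upward toward zero; when the add makes it
    // non-negative (PositiveOrZero), the slow path below calls cti_optimize,
    // which decides whether to tier up to the DFG at this loop. (A sketch of
    // the tier-up heuristic; the counter details live in CodeBlock.)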
    if (canBeOptimized())
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));

    // Emit the watchdog timer check:
    if (m_vm->watchdog.isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog.timerDidFireAddress())));
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        JITStubCall stubCall(this, cti_optimize);
        stubCall.addArgument(TrustedImm32(m_bytecodeOffset));
        stubCall.call();

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog.isEnabled()) {
        linkSlowCase(iter);

        JITStubCall stubCall(this, cti_handle_watchdog_timer);
        stubCall.call();

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_resolve_operations(ResolveOperations* resolveOperations, const int* baseVR, const int* valueVR)
{
#if USE(JSVALUE32_64)
    unmap();
#else
    killLastResultRegister();
#endif

    if (resolveOperations->isEmpty()) {
        addSlowCase(jump());
        return;
    }

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif
    const RegisterID scope = regT2;
    const RegisterID scratch = regT3;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ResolveOperation* pc = resolveOperations->data();
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, scope);
    bool setBase = false;
    bool resolvingBase = true;
    while (resolvingBase) {
        switch (pc->m_operation) {
        case ResolveOperation::ReturnGlobalObjectAsBase:
            move(TrustedImmPtr(globalObject), value);
#if USE(JSVALUE32_64)
            move(TrustedImm32(JSValue::CellTag), valueTag);
#endif
            emitValueProfilingSite();
            emitStoreCell(*baseVR, value);
            return;
        case ResolveOperation::SetBaseToGlobal:
            RELEASE_ASSERT(baseVR);
            setBase = true;
            move(TrustedImmPtr(globalObject), scratch);
            emitStoreCell(*baseVR, scratch);
            resolvingBase = false;
            ++pc;
            break;
        case ResolveOperation::SetBaseToUndefined: {
            RELEASE_ASSERT(baseVR);
            setBase = true;
#if USE(JSVALUE64)
            move(TrustedImm64(JSValue::encode(jsUndefined())), scratch);
            emitPutVirtualRegister(*baseVR, scratch);
#else
            emitStore(*baseVR, jsUndefined());
#endif
            resolvingBase = false;
            ++pc;
            break;
        }
        case ResolveOperation::SetBaseToScope:
            RELEASE_ASSERT(baseVR);
            setBase = true;
            emitStoreCell(*baseVR, scope);
            resolvingBase = false;
            ++pc;
            break;
        case ResolveOperation::ReturnScopeAsBase:
            emitStoreCell(*baseVR, scope);
            RELEASE_ASSERT(value == regT0);
            move(scope, value);
#if USE(JSVALUE32_64)
            move(TrustedImm32(JSValue::CellTag), valueTag);
#endif
            emitValueProfilingSite();
            return;
        case ResolveOperation::SkipTopScopeNode: {
#if USE(JSVALUE32_64)
            Jump activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
#else
            Jump activationNotCreated = branchTest64(Zero, addressFor(m_codeBlock->activationRegister()));
#endif
            loadPtr(Address(scope, JSScope::offsetOfNext()), scope);
            activationNotCreated.link(this);
            ++pc;
            break;
        }
        case ResolveOperation::CheckForDynamicEntriesBeforeGlobalScope: {
            move(scope, regT3);
            loadPtr(Address(regT3, JSScope::offsetOfNext()), regT1);
            Jump atTopOfScope = branchTestPtr(Zero, regT1);
            Label loopStart = label();
            loadPtr(Address(regT3, JSCell::structureOffset()), regT2);
            Jump isActivation = branchPtr(Equal, regT2, TrustedImmPtr(globalObject->activationStructure()));
            addSlowCase(branchPtr(NotEqual, regT2, TrustedImmPtr(globalObject->nameScopeStructure())));
            isActivation.link(this);
            move(regT1, regT3);
            loadPtr(Address(regT3, JSScope::offsetOfNext()), regT1);
            branchTestPtr(NonZero, regT1).linkTo(loopStart, this);
            atTopOfScope.link(this);
            ++pc;
            break;
        }
        case ResolveOperation::SkipScopes: {
            for (int i = 0; i < pc->m_scopesToSkip; i++)
                loadPtr(Address(scope, JSScope::offsetOfNext()), scope);
            ++pc;
            break;
        }
        case ResolveOperation::Fail:
            addSlowCase(jump());
            return;
        default:
            resolvingBase = false;
        }
    }
1417 if (baseVR && !setBase)
1418 emitStoreCell(*baseVR, scope);
1419
1420 RELEASE_ASSERT(valueVR);
1421 ResolveOperation* resolveValueOperation = pc;
1422 switch (resolveValueOperation->m_operation) {
1423 case ResolveOperation::GetAndReturnGlobalProperty: {
1424 // Verify structure.
1425 move(TrustedImmPtr(globalObject), regT2);
1426 move(TrustedImmPtr(resolveValueOperation), regT3);
1427 loadPtr(Address(regT3, OBJECT_OFFSETOF(ResolveOperation, m_structure)), regT1);
1428 addSlowCase(branchPtr(NotEqual, regT1, Address(regT2, JSCell::structureOffset())));
1429
1430 // Load property.
1431 load32(Address(regT3, OBJECT_OFFSETOF(ResolveOperation, m_offset)), regT3);
1432
1433 // regT2: GlobalObject
1434 // regT3: offset
1435 #if USE(JSVALUE32_64)
1436 compileGetDirectOffset(regT2, valueTag, value, regT3, KnownNotFinal);
1437 #else
1438 compileGetDirectOffset(regT2, value, regT3, regT1, KnownNotFinal);
1439 #endif
1440 break;
1441 }
1442 case ResolveOperation::GetAndReturnGlobalVarWatchable:
1443 case ResolveOperation::GetAndReturnGlobalVar: {
1444 #if USE(JSVALUE32_64)
1445 load32(reinterpret_cast<char*>(pc->m_registerAddress) + OBJECT_OFFSETOF(JSValue, u.asBits.tag), valueTag);
1446 load32(reinterpret_cast<char*>(pc->m_registerAddress) + OBJECT_OFFSETOF(JSValue, u.asBits.payload), value);
1447 #else
1448 load64(reinterpret_cast<char*>(pc->m_registerAddress), value);
1449 #endif
1450 break;
1451 }
1452 case ResolveOperation::GetAndReturnScopedVar: {
1453 loadPtr(Address(scope, JSVariableObject::offsetOfRegisters()), scope);
1454 #if USE(JSVALUE32_64)
1455 load32(Address(scope, pc->m_offset * sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), valueTag);
1456 load32(Address(scope, pc->m_offset * sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
1457 #else
1458 load64(Address(scope, pc->m_offset * sizeof(Register)), value);
1459 #endif
1460 break;
1461 }
1462 default:
1463 CRASH();
1464 return;
1465 }
1466
1467 #if USE(JSVALUE32_64)
1468 emitStore(*valueVR, valueTag, value);
1469 #else
1470 emitPutVirtualRegister(*valueVR, value);
1471 #endif
1472 emitValueProfilingSite();
1473 }
1474
void JIT::emitSlow_link_resolve_operations(ResolveOperations* resolveOperations, Vector<SlowCaseEntry>::iterator& iter)
{
    if (resolveOperations->isEmpty()) {
        linkSlowCase(iter);
        return;
    }

    ResolveOperation* pc = resolveOperations->data();
    bool resolvingBase = true;
    while (resolvingBase) {
        switch (pc->m_operation) {
        case ResolveOperation::ReturnGlobalObjectAsBase:
            return;
        case ResolveOperation::SetBaseToGlobal:
            resolvingBase = false;
            ++pc;
            break;
        case ResolveOperation::SetBaseToUndefined: {
            resolvingBase = false;
            ++pc;
            break;
        }
        case ResolveOperation::SetBaseToScope:
            resolvingBase = false;
            ++pc;
            break;
        case ResolveOperation::ReturnScopeAsBase:
            return;
        case ResolveOperation::SkipTopScopeNode: {
            ++pc;
            break;
        }
        case ResolveOperation::SkipScopes:
            ++pc;
            break;
        case ResolveOperation::Fail:
            linkSlowCase(iter);
            return;
        case ResolveOperation::CheckForDynamicEntriesBeforeGlobalScope: {
            // The fast path's scope-chain walk registered a single slow case.
            linkSlowCase(iter);
            ++pc;
            break;
        }
        default:
            resolvingBase = false;
        }
    }
    ResolveOperation* resolveValueOperation = pc;
    switch (resolveValueOperation->m_operation) {
    case ResolveOperation::GetAndReturnGlobalProperty: {
        // Matches the structure check in the fast path.
        linkSlowCase(iter);
        break;
    }
    case ResolveOperation::GetAndReturnGlobalVarWatchable:
    case ResolveOperation::GetAndReturnGlobalVar:
        break;
    case ResolveOperation::GetAndReturnScopedVar:
        break;
    default:
        CRASH();
        return;
    }
}

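// op_resolve looks a name up through the scope chain and writes only the
// value; no base register is requested, hence the null baseVR argument.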
void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    ResolveOperations* operations = currentInstruction[3].u.resolveOperations;
    int dst = currentInstruction[1].u.operand;
    emit_resolve_operations(operations, 0, &dst);
}

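// The slow path links whatever slow cases the fast path registered, then
// re-resolves the name through the cti_op_resolve stub and profiles the result.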
void JIT::emitSlow_op_resolve(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveOperations* operations = currentInstruction[3].u.resolveOperations;
    emitSlow_link_resolve_operations(operations, iter);
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.resolveOperations));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

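// op_resolve_base resolves only the base object of a name, e.g. as the target
// of a put; here the value register is the null argument instead.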
void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
    int dst = currentInstruction[1].u.operand;
    emit_resolve_operations(operations, &dst, 0);
}

void JIT::emitSlow_op_resolve_base(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
    emitSlow_link_resolve_operations(operations, iter);
    // Operand 3 flags a put in strict code: the strict stub must throw if the
    // base cannot be resolved rather than defaulting to the global object.
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.resolveOperations));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[5].u.putToBaseOperation));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

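// op_resolve_with_base resolves a name to both its base object and its value
// in a single walk of the scope chain.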
void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    emit_resolve_operations(operations, &base, &value);
}

void JIT::emitSlow_op_resolve_with_base(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
    emitSlow_link_resolve_operations(operations, iter);
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.resolveOperations));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[5].u.putToBaseOperation));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}

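// op_resolve_with_this follows the same resolve-base-and-value pattern; the
// base register here receives the 'this' value for a subsequent call.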
void JIT::emit_op_resolve_with_this(Instruction* currentInstruction)
{
    ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    emit_resolve_operations(operations, &base, &value);
}

void JIT::emitSlow_op_resolve_with_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
    emitSlow_link_resolve_operations(operations, iter);
    JITStubCall stubCall(this, cti_op_resolve_with_this);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.resolveOperations));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}

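// Each put-to-base kind links exactly the slow cases its fast path registered;
// kinds handled entirely inline (VariablePut) or entirely through the stub
// (Uninitialised, Readonly, Generic) have nothing to link and return early.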
void JIT::emitSlow_op_put_to_base(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int id = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    PutToBaseOperation* putToBaseOperation = currentInstruction[4].u.putToBaseOperation;
    switch (putToBaseOperation->m_kind) {
    case PutToBaseOperation::VariablePut:
        return;

    case PutToBaseOperation::GlobalVariablePutChecked:
        linkSlowCase(iter);
        // Fall through: the checked put shares the dynamic-scope handling below.
    case PutToBaseOperation::GlobalVariablePut:
        if (!putToBaseOperation->m_isDynamic)
            return;
        linkSlowCase(iter);
        break;

    case PutToBaseOperation::Uninitialised:
    case PutToBaseOperation::Readonly:
    case PutToBaseOperation::Generic:
        return;

    case PutToBaseOperation::GlobalPropertyPut:
        linkSlowCase(iter);
        break;
    }

    JITStubCall stubCall(this, cti_op_put_to_base);

    stubCall.addArgument(TrustedImm32(base));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(id)));
    stubCall.addArgument(TrustedImm32(value));
    stubCall.addArgument(TrustedImmPtr(putToBaseOperation));
    stubCall.call();
}

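// The remaining opcodes allocate new objects. Each compiles to a plain stub
// call; allocation is not inlined on these paths.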
void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

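// Operand 3 marks a lazily created function: if the destination register is
// already populated, the allocation is skipped entirely.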
void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTest64(NonZero, addressFor(dst));
#endif
    }

    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(dst);

    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        unmap();
#else
        // The lazy path skipped the stub call, so the cached last-result
        // register cannot be trusted on the merged path.
        killLastResultRegister();
#endif
        lazyJump.link(this);
    }
}

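// Function expressions always allocate; there is no lazy guard here, so this
// is a straight stub call.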
void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

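// op_new_array passes the first element's register index and the element
// count, plus the allocation profile that steers the array's indexing type.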
void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}

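// Here the size is a runtime value rather than a constant, so it is loaded
// from its virtual register; on JSVALUE64 a scratch register must be named.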
void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_with_size);
#if USE(JSVALUE64)
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
#else
    stubCall.addArgument(currentInstruction[2].u.operand);
#endif
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}

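// Like op_new_array, but the elements come from a constant buffer baked into
// the CodeBlock rather than from a run of virtual registers.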
void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_buffer);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}

} // namespace JSC

#endif // ENABLE(JIT)