/*
 * Copyright (C) 2009, 2012, 2013 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "CopiedSpaceInlines.h"
#include "Debugger.h"
#include "Heap.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "VirtualRegister.h"

namespace JSC {

#if USE(JSVALUE64)
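
// In the 64-bit value representation (USE(JSVALUE64)) every JSValue fits in one
// GPR: any number has some of the top sixteen bits set (tagTypeNumberRegister
// holds TagTypeNumber, 0xffff000000000000, and an int32 is TagTypeNumber | value),
// cell pointers test to zero under tagMaskRegister (TagTypeNumber | TagBitTypeOther),
// and the remaining immediates live in the low bits: false = 0x06, true = 0x07,
// undefined = 0x0a, null = 0x02. The bit tricks in the opcodes below rely on
// this encoding.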

JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_captured_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitNotifyWrite(regT0, regT1, currentInstruction[3].u.watchpointSet);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
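    // emitAllocateJSObject pops the allocator's free list inline and registers a
    // slow case, linked in emitSlow_op_new_object below, that is taken when the
    // free list is empty.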
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands value and proto into registers; baseVal was already
    // handled by op_check_has_instance. regT0 is kept free for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(regT2, regT2, regT3);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an object. Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
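    // A cell is undefined only if it masquerades as undefined (e.g. document.all),
    // and then only when observed from within the global object it was created in.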
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
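    // Booleans are encoded as ValueFalse (0x06) and ValueTrue (0x07), so xoring
    // with ValueFalse leaves 0 or 1 for a boolean and something larger for any
    // other value; masking with ~1 is then zero exactly for booleans.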
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
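    // Every number (int32 or double) has at least one of the top sixteen tag
    // bits set in this encoding, so one test against tagTypeNumberRegister
    // answers "is number".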
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;
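    // The activation is created lazily; if its register is still empty there is
    // nothing to tear off.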
    Jump activationNotCreated = branchTest64(Zero, addressFor(activation));
    emitGetVirtualRegister(activation, regT0);
    callOperation(operationTearOffActivation, regT0);
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branchTest64(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset())));
    emitGetVirtualRegister(unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset(), regT0);
    emitGetVirtualRegister(activation, regT1);
    callOperation(operationTearOffArguments, regT0, regT1);
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueGPR);
    Jump notObject = emitJumpIfCellNotObject(returnValueGPR);

    // Return.
    emitFunctionEpilogue();
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueGPR);

    // Return.
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

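    // Non-cells and strings are already primitive and pass through unchanged;
    // any other cell goes to the slow path, which performs the conversion.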
    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchStructure(NotEqual,
        Address(regT0, JSCell::structureIDOffset()),
        m_vm->stringStructure.get()));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

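    // Fast paths: the integer 0 and false jump, any other immediate integer or
    // true falls through, and everything else (doubles, cells) takes the slow path.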
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
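    // Fast path only when both operands are immediate integers; everything else
    // (doubles, strings, objects) is handled by the slow case.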
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister().offset() || m_codeBlock->isStrictMode())
        isNotObject.append(emitJumpIfCellNotObject(regT0));

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    callOperation(operationGetPNames, regT0);
    emitStoreCell(dst, returnValueGPR);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store64(tagTypeNumberRegister, addressFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);
    callOperation(operationToObject, base, regT0);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

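    // Walk the iterator's cached vector of property names. The cached structure
    // and prototype chain are revalidated on every iteration; if the object has
    // changed shape, fall back to operationHasProperty for the current key.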
    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    load64(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    emitLoadStructure(regT0, regT2, regT3);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));

    // Test base's prototype chain
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain)), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    emitLoadStructure(regT2, regT2, regT1);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    callOperation(operationHasProperty, regT0, regT1);

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationPushWithScope, regT0);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    callOperation(operationPopScope);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    callOperation(operationPushNameScope, &m_codeBlock->identifier(currentInstruction[1].u.operand), regT0, currentInstruction[3].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // Gotta restore the tag registers. We could be throwing from FTL, which may
    // clobber them.
    move(TrustedImm64(TagTypeNumber), tagTypeNumberRegister);
    move(TrustedImm64(TagMask), tagMaskRegister);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

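    // Load the pending exception into the result register and clear it in the
    // VM so it does not get re-thrown.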
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    move(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))), regT0);
    callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock->ownerExecutable());

    emitEnterOptimizationCheck();
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    callOperation(operationCreateActivation, 0);
    emitStoreCell(dst, returnValueGPR);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));

    callOperation(operationCreateArguments);
    emitStoreCell(dst, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(dst)), returnValueGPR);

    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    store64(TrustedImm64((int64_t)0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

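    // Fast path: |this| is already a final object whose structure matches the one
    // cached in the instruction stream; any mismatch falls through to slow_path_to_this.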
    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int result = currentInstruction[1].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[2].u.jsCell;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);

    loadPtr(cachedFunction, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));

    emitPutVirtualRegister(result);
}

void JIT::emitSlow_op_get_callee(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_callee);
    slowPathCall.call();
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));
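    // The slow case just added fires when the allocator is null, i.e. the callee
    // has never been used as a constructor and has no object allocation profile yet.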

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileWillCall, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileDidCall, regT0);
    profilerDone.link(this);
}


// Slow cases

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
    slowPathCall.call();
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
    slowPathCall.call();
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(baseVal, regT1);
    callOperation(operationCheckHasInstance, dst, regT0, regT1);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    callOperation(operationGetArgumentsLength, dst, base);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
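    // This fast path reads the argument straight off the call frame, which is
    // only valid while no arguments object has been materialized for this frame.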
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    callOperation(operationCreateArguments);
    emitStoreCell(arguments, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(arguments)), returnValueGPR);

    skipArgumentsCreation.link(this);
    emitGetVirtualRegister(arguments, regT0);
    emitGetVirtualRegister(property, regT1);
    callOperation(WithProfile, operationGetByValGeneric, dst, regT0, regT1);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_touch_entry(Instruction* currentInstruction)
{
    if (m_codeBlock->symbolTable()->m_functionEnteredOnce.hasBeenInvalidated())
        return;

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_touch_entry);
    slowPathCall.call();
}

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
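        // The execution counter counts up from a negative threshold; once enough
        // loop iterations push it to zero or above, the slow path calls
        // operationOptimize to consider tiering up.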
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }

    // Emit the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog->timerDidFireAddress())));
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, regT0, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled()) {
        linkSlowCase(iter);
        callOperation(operationHandleWatchdogTimer);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
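    // Operand 3 is set when the target register may already hold the function
    // from an earlier execution of this opcode; in that case the allocation is skipped.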
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTest64(NonZero, addressFor(dst));
#endif
    }

    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[2].u.operand);
    callOperation(operationNewFunction, dst, funcExec);

    if (currentInstruction[3].u.operand)
        lazyJump.link(this);
}

void JIT::emit_op_new_captured_func(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_new_captured_func);
    slowPathCall.call();
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    FunctionExecutable* funcExpr = m_codeBlock->functionExpr(currentInstruction[2].u.operand);
    callOperation(operationNewFunction, dst, funcExpr);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}

void JIT::emitSlow_op_captured_mov(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    VariableWatchpointSet* set = currentInstruction[3].u.watchpointSet;
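    // emit_op_captured_mov registers slow cases only while the watchpoint set is
    // still valid, so there is nothing to link if it was already invalidated.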
    if (!set || set->state() == IsInvalidated)
        return;
#if USE(JSVALUE32_64)
    linkSlowCase(iter);
#endif
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_captured_mov);
    slowPathCall.call();
}

} // namespace JSC

#endif // ENABLE(JIT)