]> git.saurik.com Git - apple/javascriptcore.git/blame - jit/JITOpcodes32_64.cpp
JavaScriptCore-1218.0.1.tar.gz
[apple/javascriptcore.git] / jit / JITOpcodes32_64.cpp
CommitLineData
4e4e5a6f 1/*
93a37866 2 * Copyright (C) 2009, 2012 Apple Inc. All rights reserved.
4e4e5a6f
A
3 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 *
14 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
15 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
18 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
19 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
22 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
24 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25 */
26
27#include "config.h"
4e4e5a6f 28
14957cd0
A
29#if ENABLE(JIT)
30#if USE(JSVALUE32_64)
31#include "JIT.h"
4e4e5a6f 32
93a37866 33#include "JITInlines.h"
4e4e5a6f
A
34#include "JITStubCall.h"
35#include "JSArray.h"
36#include "JSCell.h"
37#include "JSFunction.h"
38#include "JSPropertyNameIterator.h"
93a37866 39#include "JSVariableObject.h"
4e4e5a6f
A
40#include "LinkBuffer.h"
41
42namespace JSC {
43
// Builds the CTI (classic JIT) trampoline that calls a host (native) C function.
// The emitted code: marks the frame as native (no CodeBlock), publishes the top
// call frame, marshals ExecState* per the target CPU's calling convention, calls
// `func`, then either returns or — if the VM has a pending exception — routes to
// ctiVMThrowTrampoline. NOTE: instruction emission order is significant throughout;
// each per-CPU branch must keep its exact sequence.
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction func)
{
    Call nativeCall;

    // Native frames have no CodeBlock; store 0 so stack walkers recognize them.
    emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock);
    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    // Return address sits on the stack (x86 call pushed it); stash it in the frame header.
    peek(regT1);
    emitPutToCallFrameHeader(regT1, JSStack::ReturnPC);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, JSStack::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, JSStack::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments -- MIPS o32 ABI requires caller-reserved arg slots.
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, JSStack::ReturnPC);

    // Calling convention:      f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    breakpoint();
#endif // CPU(X86)

    // Check for an exception: the VM exception slot is non-empty iff its tag differs
    // from EmptyValueTag.
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&vm->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    // Record where the exception happened, so the throw machinery can unwind from it.
    move(TrustedImmPtr(&vm->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_vm->topCallFrame);
    // Set the return address so that `ret` lands in the VM throw trampoline.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_vm, this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return FINALIZE_CODE(patchBuffer, ("JIT CTI native call"));
}
185
186void JIT::emit_op_mov(Instruction* currentInstruction)
187{
188 unsigned dst = currentInstruction[1].u.operand;
189 unsigned src = currentInstruction[2].u.operand;
190
191 if (m_codeBlock->isConstantRegisterIndex(src))
192 emitStore(dst, getConstantOperand(src));
193 else {
194 emitLoad(src, regT1, regT0);
195 emitStore(dst, regT1, regT0);
14957cd0 196 map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
4e4e5a6f
A
197 }
198}
199
// op_end: terminate execution of this code block, returning operand 1 in
// regT1:regT0 (the return-value registers). Restores the return address saved in
// the call frame header before returning.
void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, JSStack::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}
207
208void JIT::emit_op_jmp(Instruction* currentInstruction)
209{
210 unsigned target = currentInstruction[1].u.operand;
211 addJump(jump(), target);
212}
213
// op_new_object: inline-allocate an empty JSObject with the structure recorded in
// the instruction's allocation profile. Allocation failure branches to the slow
// case inside emitAllocateJSObject (handled by emitSlow_op_new_object).
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSObject::allocationSize(structure->inlineCapacity());
    // Destructor-less allocator: plain JSObjects need no finalization.
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}
228
6fe7ccc8 229void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
4e4e5a6f 230{
6fe7ccc8 231 linkSlowCase(iter);
93a37866
A
232 JITStubCall stubCall(this, cti_op_new_object);
233 stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.objectAllocationProfile->structure()));
234 stubCall.call(currentInstruction[1].u.operand);
4e4e5a6f
A
235}
236
14957cd0
A
// op_check_has_instance: verify that the right-hand side of `instanceof` is a
// cell that implements the default hasInstance behavior; otherwise take the slow
// case (which may invoke a custom hasInstance).
void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[3].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}
250
4e4e5a6f
A
// op_instanceof: fast path for `value instanceof base` once op_check_has_instance
// has validated the base. Walks value's prototype chain looking for proto.
// Register choreography is deliberate (regT2 = value then its successive
// prototypes; regT1 = proto) — do not reorder.
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}
290
14957cd0
A
// Slow path for op_check_has_instance: calls the stub, which handles non-default
// hasInstance (and throws for non-objects). The stub stores the full instanceof
// result directly, so on return we skip past op_instanceof to operand 4's offset.
// linkSlowCase order mirrors the addSlowCase order in the fast path.
void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.call(dst);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}
307
4e4e5a6f
A
// Slow path for op_instanceof: non-cell value/proto or non-object prototype.
// Falls back to the cti_op_instanceof stub. linkSlowCase order mirrors the
// addSlowCase order in emit_op_instanceof.
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}
323
// op_is_undefined: dst = (value === undefined), including the MasqueradesAsUndefined
// case — a masquerading cell only counts as undefined when its structure's global
// object matches this code block's global object.
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    // Non-cell: result is simply (tag == UndefinedTag).
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();

    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    // Ordinary cell: never undefined.
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    // Masquerading cell: undefined only within its own global object.
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}
350
6fe7ccc8 351void JIT::emit_op_is_boolean(Instruction* currentInstruction)
4e4e5a6f 352{
6fe7ccc8
A
353 unsigned dst = currentInstruction[1].u.operand;
354 unsigned value = currentInstruction[2].u.operand;
355
356 emitLoadTag(value, regT0);
357 compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
358 emitStoreBool(dst, regT0);
4e4e5a6f
A
359}
360
// op_is_number: dst = (value is int32 or double), decided purely by the tag word.
// Relies on the JSVALUE32_64 tag layout: adding 1 to the tag and comparing
// Below (unsigned) against LowestTag + 1 selects exactly the number tags
// (the int32 tag wraps to 0; double "tags" sit below LowestTag).
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}
371
// op_is_string: dst = (value is a cell whose structure type is StringType).
void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // Cell: compare the structure's type field against StringType.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();

    // Non-cell values are never strings.
    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}
390
// op_tear_off_activation: detach the activation object from the stack so it can
// outlive the frame. Skipped entirely when the activation was never created
// (its register still holds the empty value).
void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branch32(Equal, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation);
    stubCall.call();
    activationNotCreated.link(this);
}
400
// op_tear_off_arguments: detach the lazily-created arguments object from the
// stack. Uses the unmodified arguments register as the created/not-created
// sentinel; skipped when the arguments object was never materialized.
void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(arguments)), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments));
    stubCall.addArgument(activation);
    stubCall.call();
    argsNotCreated.link(this);
}
413
4e4e5a6f
A
// op_to_primitive: fast path is a no-op copy — primitives and strings are already
// primitive. Any non-string cell takes the slow case (which runs toPrimitive).
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    // Cells must be strings to stay on the fast path.
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}
429
430void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
431{
432 int dst = currentInstruction[1].u.operand;
433
434 linkSlowCase(iter);
435
436 JITStubCall stubCall(this, cti_op_to_primitive);
437 stubCall.addArgument(regT1, regT0);
438 stubCall.call(dst);
439}
440
441void JIT::emit_op_strcat(Instruction* currentInstruction)
442{
443 JITStubCall stubCall(this, cti_op_strcat);
6fe7ccc8
A
444 stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
445 stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
4e4e5a6f
A
446 stubCall.call(currentInstruction[1].u.operand);
447}
448
4e4e5a6f
A
449void JIT::emit_op_not(Instruction* currentInstruction)
450{
451 unsigned dst = currentInstruction[1].u.operand;
452 unsigned src = currentInstruction[2].u.operand;
453
454 emitLoadTag(src, regT0);
455
14957cd0
A
456 emitLoad(src, regT1, regT0);
457 addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
458 xor32(TrustedImm32(1), regT0);
4e4e5a6f
A
459
460 emitStoreBool(dst, regT0, (dst == src));
461}
462
463void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
464{
465 unsigned dst = currentInstruction[1].u.operand;
466 unsigned src = currentInstruction[2].u.operand;
467
468 linkSlowCase(iter);
469
470 JITStubCall stubCall(this, cti_op_not);
471 stubCall.addArgument(src);
472 stubCall.call(dst);
473}
474
// op_jfalse: jump to `target` when the condition is falsy. Fast path covers
// boolean and int32 tags (both have a zero payload iff falsy); other tags take
// the slow case. The ASSERT documents the tag-ordering assumption the Below
// comparison relies on.
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}
486
// Slow path for op_jfalse: handles doubles inline when FP is available
// (jump if zero or NaN), otherwise defers to the cti_op_jtrue stub and inverts
// the result.
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}
510
// op_jtrue: jump to `target` when the condition is truthy. Mirror image of
// emit_op_jfalse: fast path covers boolean/int32 tags only (nonzero payload
// means truthy); other tags take the slow case.
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}
522
// Slow path for op_jtrue: handles doubles inline when FP is available (jump if
// nonzero and not NaN), otherwise defers to the cti_op_jtrue stub.
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}
546
// op_jeq_null: jump to `target` when src == null/undefined (loose equality),
// including cells that masquerade as undefined for this code block's global object.
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    // Masquerading cell equals null only within its own global object.
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    // Or-ing 1 into the tag folds UndefinedTag and NullTag into one comparison
    // (the ASSERT documents the layout this depends on).
    isImmediate.link(this);
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}
572
// op_jneq_null: jump to `target` when src != null/undefined (loose inequality).
// Inverse of emit_op_jeq_null, with the same MasqueradesAsUndefined handling.
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    // A non-masquerading cell is never null; a masquerading one only equals null
    // within its own global object.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    // Same tag-folding trick as emit_op_jeq_null, with the comparison inverted.
    isImmediate.link(this);
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}
598
// op_jneq_ptr: jump to `target` unless src is exactly the special cell pointer
// recorded in the instruction (e.g. used to guard cached global functions).
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    // Not a cell, or a different cell payload -> jump.
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}
609
4e4e5a6f
A
// op_eq: loose equality fast path. Only handles operands with identical,
// non-cell, non-double tags (payload comparison suffices); the three addSlowCase
// exits are consumed in order by emitSlow_op_eq.
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
625
// Slow path for op_eq. Three slow-case entries arrive in fast-path order:
// (1) tags differ, (2) both cells, (3) doubles. Two equal-tagged string cells
// are compared via cti_op_eq_strings; everything else goes through cti_op_eq.
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    emitStoreBool(dst, regT0);
}
659
// op_neq: loose inequality fast path. Identical structure to emit_op_eq, with
// the payload comparison inverted; slow cases handled by emitSlow_op_neq.
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
675
// Slow path for op_neq. Same shape as emitSlow_op_eq; both stubs compute
// equality, so the final xor32 inverts the boolean before storing.
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), regT0); // Invert: stubs computed equality.
    emitStoreBool(dst, regT0);
}
708
// Shared fast path for op_stricteq / op_nstricteq. With identical non-double
// tags, strict (in)equality reduces to a payload comparison — except two strings,
// which need content comparison and bail to the slow case.
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings.
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstNotString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    notCell.link(this);
    firstNotString.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
736
737void JIT::emit_op_stricteq(Instruction* currentInstruction)
738{
739 compileOpStrictEq(currentInstruction, OpStrictEq);
740}
741
// Slow path for op_stricteq: links the three fast-path bails (tags differ,
// doubles, two strings) in order, then evaluates via the cti_op_stricteq stub.
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}
757
758void JIT::emit_op_nstricteq(Instruction* currentInstruction)
759{
760 compileOpStrictEq(currentInstruction, OpNStrictEq);
761}
762
// Slow path for op_nstricteq: mirror of emitSlow_op_stricteq, calling the
// cti_op_nstricteq stub.
void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}
778
// op_eq_null: dst = (src loosely equals null), i.e. src is null, undefined, or a
// MasqueradesAsUndefined cell belonging to this code block's global object.
// Result boolean is accumulated in regT1.
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    // Ordinary cell: never equals null.
    move(TrustedImm32(0), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    // Masquerading cell: equals null only within its own global object.
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate: true iff the tag is NullTag or UndefinedTag.
    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}
809
// op_neq_null: dst = (src != null). Exact logical negation of op_eq_null:
// false for null, undefined, and matching MasqueradesAsUndefined cells.
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // Cell path: an ordinary cell is always != null (result 1); a
    // MasqueradesAsUndefined cell is != null only if its structure's global
    // object differs from ours.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate path: result is true iff the tag is neither NullTag nor
    // UndefinedTag.
    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    // regT1 now holds the boolean result on all paths.
    emitStoreBool(dst, regT1);
}
840
4e4e5a6f
A
// op_throw: pass the exception value to the runtime, which unwinds to the
// nearest handler. Control never returns here.
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // this point in the code should never be reached.
    breakpoint();
#endif
}
854
// op_get_pnames: set up a for-in iteration. Produces a JSPropertyNameIterator
// in dst, initializes the loop index i to 0 and size to the string count.
// Null/undefined bases jump straight to breakTarget (empty iteration); other
// non-objects are converted with cti_to_object and retried.
void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    // 'this' in sloppy mode is known to be an object, so the check can be
    // skipped in that one case.
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    // i = 0, size = number of property-name strings in the iterator.
    store32(TrustedImm32(Int32Tag), intTagFor(i));
    store32(TrustedImm32(0), intPayloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, payloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    // null/undefined: nothing to enumerate, jump out of the loop entirely.
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
    // Any other non-object: coerce to object in place, then retry.
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}
896
// op_next_pname: advance a for-in iteration. Loads key i from the iterator
// into dst and jumps to target to run the loop body; falls through when
// i == size. The cached-structure fast path verifies the base (and its whole
// prototype chain) still matches what the iterator captured; otherwise
// cti_has_property re-validates the key, skipping deleted properties.
void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(payloadFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    loadPtr(payloadFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    // Empty chain vector: nothing more to check, key is valid.
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    // Walk the prototype chain, comparing each prototype's structure against
    // the cached StructureChain entry; any mismatch (or a null prototype
    // encountered early) falls back to cti_has_property.
    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
    loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    // Key was deleted: loop back and try the next one.
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}
961
93a37866 962void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
4e4e5a6f 963{
93a37866 964 JITStubCall stubCall(this, cti_op_push_with_scope);
4e4e5a6f 965 stubCall.addArgument(currentInstruction[1].u.operand);
93a37866 966 stubCall.call();
4e4e5a6f
A
967}
968
969void JIT::emit_op_pop_scope(Instruction*)
970{
971 JITStubCall(this, cti_op_pop_scope).call();
972}
973
// op_to_number: coerce src to a number. Int32 and double values are already
// numbers and pass through unchanged; every other tag takes the slow path.
void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    // Tags >= LowestTag are non-double values (a double's high word is below
    // LowestTag); having already excluded Int32, those must go slow.
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    // Remember that dst lives in regT1:regT0 for the next opcode.
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_number), dst, regT1, regT0);
}
989
93a37866 990void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
4e4e5a6f
A
991{
992 int dst = currentInstruction[1].u.operand;
993
994 linkSlowCase(iter);
995
93a37866 996 JITStubCall stubCall(this, cti_op_to_number);
4e4e5a6f
A
997 stubCall.addArgument(regT1, regT0);
998 stubCall.call(dst);
999}
1000
93a37866 1001void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
4e4e5a6f 1002{
93a37866
A
1003 JITStubCall stubCall(this, cti_op_push_name_scope);
1004 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
1005 stubCall.addArgument(currentInstruction[2].u.operand);
1006 stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
1007 stubCall.call();
4e4e5a6f
A
1008}
1009
// op_catch: landing pad for an exception. Restores the handler's call frame,
// fetches the pending exception out of the VM, clears it, and stores it into
// the catch variable.
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // cti_op_throw returns the callFrame for the handler.
    move(regT0, callFrameRegister);

    // Now store the exception returned by cti_op_throw.
    loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, vm)), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    // Clear VM::exception by writing the empty JSValue back, so the exception
    // is considered handled.
    store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
    // Remember that the exception register lives in regT1:regT0.
    map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}
1026
4e4e5a6f
A
1027void JIT::emit_op_switch_imm(Instruction* currentInstruction)
1028{
1029 unsigned tableIndex = currentInstruction[1].u.operand;
1030 unsigned defaultOffset = currentInstruction[2].u.operand;
1031 unsigned scrutinee = currentInstruction[3].u.operand;
1032
1033 // create jump table for switch destinations, track this switch statement.
1034 SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
14957cd0 1035 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
4e4e5a6f
A
1036 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1037
1038 JITStubCall stubCall(this, cti_op_switch_imm);
1039 stubCall.addArgument(scrutinee);
6fe7ccc8 1040 stubCall.addArgument(TrustedImm32(tableIndex));
4e4e5a6f
A
1041 stubCall.call();
1042 jump(regT0);
1043}
1044
1045void JIT::emit_op_switch_char(Instruction* currentInstruction)
1046{
1047 unsigned tableIndex = currentInstruction[1].u.operand;
1048 unsigned defaultOffset = currentInstruction[2].u.operand;
1049 unsigned scrutinee = currentInstruction[3].u.operand;
1050
1051 // create jump table for switch destinations, track this switch statement.
1052 SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
14957cd0 1053 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
4e4e5a6f
A
1054 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1055
1056 JITStubCall stubCall(this, cti_op_switch_char);
1057 stubCall.addArgument(scrutinee);
6fe7ccc8 1058 stubCall.addArgument(TrustedImm32(tableIndex));
4e4e5a6f
A
1059 stubCall.call();
1060 jump(regT0);
1061}
1062
1063void JIT::emit_op_switch_string(Instruction* currentInstruction)
1064{
1065 unsigned tableIndex = currentInstruction[1].u.operand;
1066 unsigned defaultOffset = currentInstruction[2].u.operand;
1067 unsigned scrutinee = currentInstruction[3].u.operand;
1068
1069 // create jump table for switch destinations, track this switch statement.
1070 StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
14957cd0 1071 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));
4e4e5a6f
A
1072
1073 JITStubCall stubCall(this, cti_op_switch_string);
1074 stubCall.addArgument(scrutinee);
6fe7ccc8 1075 stubCall.addArgument(TrustedImm32(tableIndex));
4e4e5a6f
A
1076 stubCall.call();
1077 jump(regT0);
1078}
1079
93a37866 1080void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
4e4e5a6f 1081{
14957cd0 1082 unsigned message = currentInstruction[1].u.operand;
4e4e5a6f 1083
93a37866 1084 JITStubCall stubCall(this, cti_op_throw_static_error);
4e4e5a6f 1085 stubCall.addArgument(m_codeBlock->getConstant(message));
93a37866 1086 stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
14957cd0 1087 stubCall.call();
4e4e5a6f
A
1088}
1089
1090void JIT::emit_op_debug(Instruction* currentInstruction)
1091{
1092#if ENABLE(DEBUG_WITH_BREAKPOINT)
1093 UNUSED_PARAM(currentInstruction);
1094 breakpoint();
1095#else
1096 JITStubCall stubCall(this, cti_op_debug);
1097 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1098 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
1099 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
93a37866 1100 stubCall.addArgument(Imm32(currentInstruction[4].u.operand));
4e4e5a6f
A
1101 stubCall.call();
1102#endif
1103}
1104
1105
1106void JIT::emit_op_enter(Instruction*)
1107{
93a37866
A
1108 emitEnterOptimizationCheck();
1109
4e4e5a6f
A
1110 // Even though JIT code doesn't use them, we initialize our constant
1111 // registers to zap stale pointers, to avoid unnecessarily prolonging
1112 // object lifetime and increasing GC pressure.
1113 for (int i = 0; i < m_codeBlock->m_numVars; ++i)
1114 emitStore(i, jsUndefined());
1115}
1116
// op_create_activation: lazily create the activation object. If the register
// no longer holds the empty value the activation already exists and the stub
// call is skipped.
void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;

    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall(this, cti_op_push_activation).call(activation);
    activationCreated.link(this);
}
1125
// op_create_arguments: lazily create the arguments object. Skipped entirely
// if dst no longer holds the empty value (already materialized).
void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));

    // The stub leaves the new arguments object in regT1:regT0; store it into
    // both the named register and its unmodified shadow.
    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(dst, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);

    argsCreated.link(this);
}
1138
14957cd0 1139void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
4e4e5a6f 1140{
14957cd0
A
1141 unsigned dst = currentInstruction[1].u.operand;
1142
1143 emitStore(dst, JSValue());
1144}
1145
// op_get_callee: load the callee cell out of the call frame header, profile
// it, and store it into dst tagged as a cell.
void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}
1154
// op_create_this: inline-allocate the 'this' object for a constructor call
// using the callee's ObjectAllocationProfile. A null allocator in the
// profile means it isn't ready yet, so that case (and allocation failure)
// goes to the slow path.
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    // Note: calleeReg and resultReg deliberately alias regT0 — the callee is
    // no longer needed once the allocator/structure have been loaded.
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitLoadPayload(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}
1172
1173void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1174{
93a37866 1175 linkSlowCase(iter); // doesn't have an allocation profile
6fe7ccc8 1176 linkSlowCase(iter); // allocation failed
93a37866 1177
14957cd0 1178 JITStubCall stubCall(this, cti_op_create_this);
93a37866 1179 stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
14957cd0 1180 stubCall.call(currentInstruction[1].u.operand);
4e4e5a6f
A
1181}
1182
// op_convert_this: fast path passes non-string cells through unchanged.
// Non-cells (first slow case) and string cells (second slow case) are
// handled in emitSlow_op_convert_this. The value is loaded into regT3:regT2,
// which the slow path relies on.
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    if (shouldEmitProfiling()) {
        // Profile the structure of the cell; the profiling site expects the
        // value in regT1:regT0.
        loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
        move(regT3, regT1);
        emitValueProfilingSite();
    }
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
}
4e4e5a6f 1197
4e4e5a6f
A
// Slow path for op_convert_this. First slow case: non-cell 'this' — if it is
// undefined, substitute the global this object inline; otherwise fall
// through to the generic stub. Second slow case: string cell — always goes
// to the stub. The moves into regT1:regT0 before each profiling site feed
// the profiler a representative value for that path.
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalThis();
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    if (shouldEmitProfiling()) {
        // Report "undefined" to the profiler for this path.
        move(TrustedImm32(JSValue::UndefinedTag), regT1);
        move(TrustedImm32(0), regT0);
    }
    Jump isNotUndefined = branch32(NotEqual, regT3, TrustedImm32(JSValue::UndefinedTag));
    emitValueProfilingSite();
    // undefined 'this' becomes the global this object; rejoin the fast path.
    move(TrustedImmPtr(globalThis), regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitStore(thisRegister, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    linkSlowCase(iter);
    if (shouldEmitProfiling()) {
        // Report "some string" (the shared string structure) to the profiler.
        move(TrustedImm32(JSValue::CellTag), regT1);
        move(TrustedImmPtr(m_vm->stringStructure.get()), regT0);
    }
    isNotUndefined.link(this);
    emitValueProfilingSite();
    // Generic conversion: the stub consumes the original value from
    // regT3:regT2 (still live from the fast path).
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT3, regT2);
    stubCall.call(thisRegister);
}
1226
4e4e5a6f
A
1227void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
1228{
4e4e5a6f
A
1229 JITStubCall stubCall(this, cti_op_profile_will_call);
1230 stubCall.addArgument(currentInstruction[1].u.operand);
1231 stubCall.call();
4e4e5a6f
A
1232}
1233
1234void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1235{
4e4e5a6f
A
1236 JITStubCall stubCall(this, cti_op_profile_did_call);
1237 stubCall.addArgument(currentInstruction[1].u.operand);
1238 stubCall.call();
4e4e5a6f
A
1239}
1240
14957cd0
A
// op_get_arguments_length: fast path is only valid while the arguments
// object has not been materialized (register still empty); then the length
// is simply ArgumentCount - 1 (excluding 'this').
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    load32(payloadFor(JSStack::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0); // exclude 'this'
    emitStoreInt32(dst, regT0);
}
1250
14957cd0
A
1251void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1252{
1253 linkSlowCase(iter);
1254 int dst = currentInstruction[1].u.operand;
1255 int base = currentInstruction[2].u.operand;
1256 int ident = currentInstruction[3].u.operand;
1257
1258 JITStubCall stubCall(this, cti_op_get_by_id_generic);
1259 stubCall.addArgument(base);
1260 stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
1261 stubCall.call(dst);
1262}
1263
// op_get_argument_by_val: read arguments[property] directly off the call
// frame. Fast path requires: no materialized arguments object, an int32
// index, and the index in range. The argument is addressed by negating the
// index and offsetting from the 'this' slot.
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including this
    load32(payloadFor(JSStack::ArgumentCount), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

    // Arguments are laid out at negative offsets from the frame, so negate
    // the index and scale by the Register size (TimesEight covers both words).
    neg32(regT2);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT1);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}
1283
// Slow path for op_get_argument_by_val. First slow case: the arguments
// object already exists — skip creation and go straight to the generic
// get_by_val. The other two slow cases (non-int32 index, out-of-range index)
// must materialize the arguments object first, then do the generic lookup.
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    // Materialize the arguments object (stub leaves it in regT1:regT0) and
    // store it into both the named register and its unmodified shadow.
    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(arguments, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(arguments);
    stubCall.addArgument(property);
    stubCall.callWithValueProfiling(dst);
}
1305
// op_put_to_base: store 'value' through a resolved base, specialized by the
// kind recorded in the PutToBaseOperation. Global variables are written
// through an absolute register address, scoped variables through the
// variable object's register array, and global properties through a
// structure-checked butterfly slot; everything else goes to the stub.
void JIT::emit_op_put_to_base(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int id = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    PutToBaseOperation* operation = currentInstruction[4].u.putToBaseOperation;


    switch (operation->m_kind) {
    case PutToBaseOperation::GlobalVariablePutChecked:
        // Checked variant: bail to the slow path if the predicate fires,
        // then intentionally fall through to the unchecked store below.
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(operation->m_predicatePointer)));
    case PutToBaseOperation::GlobalVariablePut: {
        JSGlobalObject* globalObject = m_codeBlock->globalObject();
        if (operation->m_isDynamic)
            addSlowCase(branchPtr(NotEqual, payloadFor(base), TrustedImmPtr(globalObject)));

        // Write payload and tag directly to the global register's address.
        emitLoad(value, regT1, regT0);
        storePtr(regT0, reinterpret_cast<char*>(operation->m_registerAddress) + OBJECT_OFFSETOF(JSValue, u.asBits.payload));
        storePtr(regT1, reinterpret_cast<char*>(operation->m_registerAddress) + OBJECT_OFFSETOF(JSValue, u.asBits.tag));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        break;
    }
    case PutToBaseOperation::VariablePut: {
        // Store into the variable object's register array at m_offset.
        loadPtr(payloadFor(base), regT3);
        emitLoad(value, regT1, regT0);
        loadPtr(Address(regT3, JSVariableObject::offsetOfRegisters()), regT2);
        store32(regT0, Address(regT2, operation->m_offset * sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        store32(regT1, Address(regT2, operation->m_offset * sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(regT3, regT1, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        break;
    }

    case PutToBaseOperation::GlobalPropertyPut: {
        JSGlobalObject* globalObject = m_codeBlock->globalObject();
        loadPtr(payloadFor(base), regT3);
        emitLoad(value, regT1, regT0);
        // Guard on the cached structure; a mismatch means the cache is stale.
        loadPtr(&operation->m_structure, regT2);
        addSlowCase(branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), regT2));
        ASSERT(!operation->m_structure || !operation->m_structure->inlineCapacity());
        // Store into the out-of-line butterfly at the cached offset.
        loadPtr(Address(regT3, JSObject::butterflyOffset()), regT2);
        load32(&operation->m_offsetInButterfly, regT3);
        storePtr(regT0, BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        storePtr(regT1, BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(globalObject, regT1, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        break;
    }

    case PutToBaseOperation::Uninitialised:
    case PutToBaseOperation::Readonly:
    case PutToBaseOperation::Generic:
        // Generic/unusual kinds: delegate entirely to the runtime stub.
        JITStubCall stubCall(this, cti_op_put_to_base);

        stubCall.addArgument(TrustedImm32(base));
        stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(id)));
        stubCall.addArgument(TrustedImm32(value));
        stubCall.addArgument(TrustedImmPtr(operation));
        stubCall.call();
        break;
    }
}
1370
14957cd0
A
1371} // namespace JSC
1372
1373#endif // USE(JSVALUE32_64)
1374#endif // ENABLE(JIT)