]> git.saurik.com Git - apple/javascriptcore.git/blame - jit/JITOpcodes.cpp
JavaScriptCore-903.5.tar.gz
[apple/javascriptcore.git] / jit / JITOpcodes.cpp
CommitLineData
ba379fdc
A
/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
26
27#include "config.h"
ba379fdc 28#if ENABLE(JIT)
14957cd0 29#include "JIT.h"
ba379fdc 30
14957cd0 31#include "Arguments.h"
ba379fdc
A
32#include "JITInlineMethods.h"
33#include "JITStubCall.h"
34#include "JSArray.h"
35#include "JSCell.h"
36#include "JSFunction.h"
f9bf01c6 37#include "JSPropertyNameIterator.h"
ba379fdc
A
38#include "LinkBuffer.h"
39
40namespace JSC {
41
14957cd0 42#if USE(JSVALUE64)
ba379fdc 43
4e4e5a6f 44void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure *trampolines)
ba379fdc
A
45{
46#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
47 // (2) The second function provides fast property access for string length
48 Label stringLengthBegin = align();
49
50 // Check eax is a string
51 Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
14957cd0 52 Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));
ba379fdc
A
53
54 // Checks out okay! - get the length from the Ustring.
4e4e5a6f 55 load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);
ba379fdc 56
14957cd0 57 Jump string_failureCases3 = branch32(LessThan, regT0, TrustedImm32(0));
ba379fdc
A
58
59 // regT0 contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
60 emitFastArithIntToImmNoCheck(regT0, regT0);
61
62 ret();
63#endif
64
65 // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
66 COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);
67
f9bf01c6
A
68 // VirtualCallLink Trampoline
69 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
14957cd0 70 JumpList callLinkFailures;
ba379fdc 71 Label virtualCallLinkBegin = align();
14957cd0 72 compileOpCallInitializeCallFrame();
ba379fdc 73 preserveReturnAddressAfterCall(regT3);
14957cd0 74 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
ba379fdc 75 restoreArgumentReference();
14957cd0
A
76 Call callLazyLinkCall = call();
77 callLinkFailures.append(branchTestPtr(Zero, regT0));
ba379fdc 78 restoreReturnAddressBeforeReturn(regT3);
14957cd0
A
79 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
80 jump(regT0);
ba379fdc 81
14957cd0
A
82 // VirtualConstructLink Trampoline
83 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
84 Label virtualConstructLinkBegin = align();
ba379fdc 85 compileOpCallInitializeCallFrame();
ba379fdc 86 preserveReturnAddressAfterCall(regT3);
14957cd0 87 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
ba379fdc 88 restoreArgumentReference();
14957cd0
A
89 Call callLazyLinkConstruct = call();
90 callLinkFailures.append(branchTestPtr(Zero, regT0));
ba379fdc 91 restoreReturnAddressBeforeReturn(regT3);
14957cd0 92 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
ba379fdc
A
93 jump(regT0);
94
f9bf01c6
A
95 // VirtualCall Trampoline
96 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
ba379fdc 97 Label virtualCallBegin = align();
14957cd0 98 compileOpCallInitializeCallFrame();
ba379fdc 99
14957cd0 100 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
f9bf01c6 101
14957cd0 102 Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
ba379fdc
A
103 preserveReturnAddressAfterCall(regT3);
104 restoreArgumentReference();
14957cd0
A
105 Call callCompileCall = call();
106 callLinkFailures.append(branchTestPtr(Zero, regT0));
107 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
ba379fdc 108 restoreReturnAddressBeforeReturn(regT3);
f9bf01c6 109 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
ba379fdc 110 hasCodeBlock3.link(this);
ba379fdc 111
14957cd0 112 loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
ba379fdc
A
113 jump(regT0);
114
14957cd0
A
115 // VirtualConstruct Trampoline
116 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
117 Label virtualConstructBegin = align();
118 compileOpCallInitializeCallFrame();
ba379fdc 119
14957cd0 120 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
ba379fdc 121
14957cd0
A
122 Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
123 preserveReturnAddressAfterCall(regT3);
124 restoreArgumentReference();
125 Call callCompileConstruct = call();
126 callLinkFailures.append(branchTestPtr(Zero, regT0));
127 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
128 restoreReturnAddressBeforeReturn(regT3);
129 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
130 hasCodeBlock4.link(this);
ba379fdc 131
14957cd0
A
132 loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
133 jump(regT0);
ba379fdc 134
14957cd0
A
135 // If the parser fails we want to be able to be able to keep going,
136 // So we handle this as a parse failure.
137 callLinkFailures.link(this);
138 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
139 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
140 restoreReturnAddressBeforeReturn(regT1);
141 move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
142 storePtr(regT1, regT2);
143 poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
144 poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
145 ret();
ba379fdc 146
14957cd0
A
147 // NativeCall Trampoline
148 Label nativeCallThunk = privateCompileCTINativeCall(globalData);
149 Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);
ba379fdc 150
14957cd0
A
151#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
152 Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
153 Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
154 Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
155#endif
ba379fdc 156
14957cd0
A
157 // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
158 LinkBuffer patchBuffer(*m_globalData, this, m_globalData->executableAllocator);
ba379fdc 159
14957cd0
A
160#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
161 patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
162 patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
163 patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
164#endif
165#if ENABLE(JIT_OPTIMIZE_CALL)
166 patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
167 patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
168#endif
169 patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
170 patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));
ba379fdc 171
14957cd0
A
172 CodeRef finalCode = patchBuffer.finalizeCode();
173 *executablePool = finalCode.m_executablePool;
ba379fdc 174
14957cd0
A
175 trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
176 trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
177 trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
178 trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
179 trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
180 trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
181#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
182 trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
ba379fdc 183#endif
14957cd0 184}
ba379fdc 185
14957cd0
A
186JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
187{
188 int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);
ba379fdc 189
14957cd0 190 Label nativeCallThunk = align();
ba379fdc 191
14957cd0 192 emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
ba379fdc 193
14957cd0
A
194#if CPU(X86_64)
195 // Load caller frame's scope chain into this callframe so that whatever we call can
196 // get to its global data.
197 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
198 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
199 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
ba379fdc 200
14957cd0
A
201 peek(regT1);
202 emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);
ba379fdc 203
14957cd0
A
204 // Calling convention: f(edi, esi, edx, ecx, ...);
205 // Host function signature: f(ExecState*);
206 move(callFrameRegister, X86Registers::edi);
ba379fdc 207
14957cd0 208 subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.
ba379fdc 209
14957cd0
A
210 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
211 loadPtr(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_executable)), X86Registers::r9);
212 move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
213 call(Address(X86Registers::r9, executableOffsetToFunction));
ba379fdc 214
14957cd0 215 addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);
ba379fdc 216
f9bf01c6 217#elif CPU(ARM)
14957cd0
A
218 // Load caller frame's scope chain into this callframe so that whatever we call can
219 // get to its global data.
220 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
221 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
222 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
f9bf01c6 223
14957cd0
A
224 preserveReturnAddressAfterCall(regT3); // Callee preserved
225 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
f9bf01c6 226
14957cd0
A
227 // Calling convention: f(r0 == regT0, r1 == regT1, ...);
228 // Host function signature: f(ExecState*);
229 move(callFrameRegister, ARMRegisters::r0);
f9bf01c6 230
14957cd0
A
231 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
232 move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
233 loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
234 call(Address(regT2, executableOffsetToFunction));
f9bf01c6 235
14957cd0 236 restoreReturnAddressBeforeReturn(regT3);
4e4e5a6f
A
237
238#elif CPU(MIPS)
14957cd0
A
239 // Load caller frame's scope chain into this callframe so that whatever we call can
240 // get to its global data.
241 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
242 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
243 emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
4e4e5a6f 244
14957cd0
A
245 preserveReturnAddressAfterCall(regT3); // Callee preserved
246 emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
4e4e5a6f 247
14957cd0
A
248 // Calling convention: f(a0, a1, a2, a3);
249 // Host function signature: f(ExecState*);
4e4e5a6f 250
14957cd0
A
251 // Allocate stack space for 16 bytes (8-byte aligned)
252 // 16 bytes (unused) for 4 arguments
253 subPtr(TrustedImm32(16), stackPointerRegister);
4e4e5a6f 254
14957cd0
A
255 // Setup arg0
256 move(callFrameRegister, MIPSRegisters::a0);
4e4e5a6f
A
257
258 // Call
14957cd0
A
259 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
260 loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
261 move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
262 call(Address(regT2, executableOffsetToFunction));
4e4e5a6f
A
263
264 // Restore stack space
14957cd0
A
265 addPtr(TrustedImm32(16), stackPointerRegister);
266
267 restoreReturnAddressBeforeReturn(regT3);
f9bf01c6 268
ba379fdc
A
269#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
270#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
271#else
14957cd0 272 UNUSED_PARAM(executableOffsetToFunction);
ba379fdc
A
273 breakpoint();
274#endif
275
276 // Check for an exception
277 loadPtr(&(globalData->exception), regT2);
278 Jump exceptionHandler = branchTestPtr(NonZero, regT2);
279
ba379fdc 280 // Return.
ba379fdc
A
281 ret();
282
283 // Handle an exception
284 exceptionHandler.link(this);
14957cd0 285
ba379fdc 286 // Grab the return address.
14957cd0 287 preserveReturnAddressAfterCall(regT1);
ba379fdc 288
14957cd0
A
289 move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
290 storePtr(regT1, regT2);
291 poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
ba379fdc 292
14957cd0
A
293 // Set the return address.
294 move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
295 restoreReturnAddressBeforeReturn(regT1);
ba379fdc 296
14957cd0 297 ret();
ba379fdc 298
14957cd0
A
299 return nativeCallThunk;
300}
ba379fdc 301
14957cd0
A
302JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool>, JSGlobalData* globalData, NativeFunction)
303{
304 return globalData->jitStubs->ctiNativeCall();
ba379fdc
A
305}
306
307void JIT::emit_op_mov(Instruction* currentInstruction)
308{
309 int dst = currentInstruction[1].u.operand;
310 int src = currentInstruction[2].u.operand;
311
312 if (m_codeBlock->isConstantRegisterIndex(src)) {
313 storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
314 if (dst == m_lastResultBytecodeRegister)
315 killLastResultRegister();
316 } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
317 // If either the src or dst is the cached register go though
318 // get/put registers to make sure we track this correctly.
319 emitGetVirtualRegister(src, regT0);
320 emitPutVirtualRegister(dst);
321 } else {
322 // Perform the copy via regT1; do not disturb any mapping in regT0.
323 loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
324 storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
325 }
326}
327
328void JIT::emit_op_end(Instruction* currentInstruction)
329{
ba379fdc
A
330 ASSERT(returnValueRegister != callFrameRegister);
331 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
332 restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
333 ret();
334}
335
336void JIT::emit_op_jmp(Instruction* currentInstruction)
337{
338 unsigned target = currentInstruction[1].u.operand;
f9bf01c6 339 addJump(jump(), target);
ba379fdc
A
340}
341
342void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
343{
344 emitTimeoutCheck();
345
346 unsigned op1 = currentInstruction[1].u.operand;
347 unsigned op2 = currentInstruction[2].u.operand;
348 unsigned target = currentInstruction[3].u.operand;
349 if (isOperandConstantImmediateInt(op2)) {
350 emitGetVirtualRegister(op1, regT0);
351 emitJumpSlowCaseIfNotImmediateInteger(regT0);
ba379fdc 352 int32_t op2imm = getConstantOperandImmediateInt(op2);
f9bf01c6 353 addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target);
ba379fdc
A
354 } else {
355 emitGetVirtualRegisters(op1, regT0, op2, regT1);
356 emitJumpSlowCaseIfNotImmediateInteger(regT0);
357 emitJumpSlowCaseIfNotImmediateInteger(regT1);
f9bf01c6 358 addJump(branch32(LessThanOrEqual, regT0, regT1), target);
ba379fdc
A
359 }
360}
361
362void JIT::emit_op_new_object(Instruction* currentInstruction)
363{
364 JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
365}
366
14957cd0
A
367void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
368{
369 unsigned baseVal = currentInstruction[1].u.operand;
370
371 emitGetVirtualRegister(baseVal, regT0);
372
373 // Check that baseVal is a cell.
374 emitJumpSlowCaseIfNotJSCell(regT0, baseVal);
375
376 // Check that baseVal 'ImplementsHasInstance'.
377 loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
378 addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
379}
380
ba379fdc
A
381void JIT::emit_op_instanceof(Instruction* currentInstruction)
382{
f9bf01c6
A
383 unsigned dst = currentInstruction[1].u.operand;
384 unsigned value = currentInstruction[2].u.operand;
385 unsigned baseVal = currentInstruction[3].u.operand;
386 unsigned proto = currentInstruction[4].u.operand;
387
ba379fdc
A
388 // Load the operands (baseVal, proto, and value respectively) into registers.
389 // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
f9bf01c6
A
390 emitGetVirtualRegister(value, regT2);
391 emitGetVirtualRegister(baseVal, regT0);
392 emitGetVirtualRegister(proto, regT1);
ba379fdc 393
14957cd0 394 // Check that proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
f9bf01c6 395 emitJumpSlowCaseIfNotJSCell(regT2, value);
f9bf01c6 396 emitJumpSlowCaseIfNotJSCell(regT1, proto);
ba379fdc 397
14957cd0
A
398 // Check that prototype is an object
399 loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
400 addSlowCase(branch8(NotEqual, Address(regT3, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));
401
402 // Fixme: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
f9bf01c6 403 // Check that baseVal 'ImplementsDefaultHasInstance'.
14957cd0
A
404 loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
405 addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
ba379fdc 406
ba379fdc
A
407 // Optimistically load the result true, and start looping.
408 // Initially, regT1 still contains proto and regT2 still contains value.
409 // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
14957cd0 410 move(TrustedImmPtr(JSValue::encode(jsBoolean(true))), regT0);
ba379fdc
A
411 Label loop(this);
412
413 // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
414 // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
14957cd0
A
415 loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
416 loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
ba379fdc 417 Jump isInstance = branchPtr(Equal, regT2, regT1);
f9bf01c6 418 emitJumpIfJSCell(regT2).linkTo(loop, this);
ba379fdc
A
419
420 // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
14957cd0 421 move(TrustedImmPtr(JSValue::encode(jsBoolean(false))), regT0);
ba379fdc
A
422
423 // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
424 isInstance.link(this);
f9bf01c6 425 emitPutVirtualRegister(dst);
ba379fdc
A
426}
427
ba379fdc
A
428void JIT::emit_op_call(Instruction* currentInstruction)
429{
430 compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
431}
432
433void JIT::emit_op_call_eval(Instruction* currentInstruction)
434{
435 compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
436}
437
ba379fdc
A
438void JIT::emit_op_call_varargs(Instruction* currentInstruction)
439{
440 compileOpCallVarargs(currentInstruction);
441}
442
443void JIT::emit_op_construct(Instruction* currentInstruction)
444{
445 compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
446}
447
448void JIT::emit_op_get_global_var(Instruction* currentInstruction)
449{
14957cd0
A
450 JSVariableObject* globalObject = m_codeBlock->globalObject();
451 loadPtr(&globalObject->m_registers, regT0);
452 loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
ba379fdc
A
453 emitPutVirtualRegister(currentInstruction[1].u.operand);
454}
455
456void JIT::emit_op_put_global_var(Instruction* currentInstruction)
457{
14957cd0
A
458 emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
459 JSVariableObject* globalObject = m_codeBlock->globalObject();
460 loadPtr(&globalObject->m_registers, regT0);
461 storePtr(regT1, Address(regT0, currentInstruction[1].u.operand * sizeof(Register)));
ba379fdc
A
462}
463
464void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
465{
14957cd0 466 int skip = currentInstruction[3].u.operand;
ba379fdc
A
467
468 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
14957cd0
A
469 bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
470 ASSERT(skip || !checkTopLevel);
471 if (checkTopLevel && skip--) {
472 Jump activationNotCreated;
473 if (checkTopLevel)
474 activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
475 loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
476 activationNotCreated.link(this);
477 }
ba379fdc
A
478 while (skip--)
479 loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
480
481 loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
14957cd0
A
482 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT0);
483 loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
ba379fdc
A
484 emitPutVirtualRegister(currentInstruction[1].u.operand);
485}
486
487void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
488{
14957cd0 489 int skip = currentInstruction[2].u.operand;
ba379fdc
A
490
491 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
492 emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
14957cd0
A
493 bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
494 ASSERT(skip || !checkTopLevel);
495 if (checkTopLevel && skip--) {
496 Jump activationNotCreated;
497 if (checkTopLevel)
498 activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
499 loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
500 activationNotCreated.link(this);
501 }
ba379fdc
A
502 while (skip--)
503 loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
504
505 loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
14957cd0
A
506 loadPtr(Address(regT1, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT1);
507 storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
ba379fdc
A
508}
509
510void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
511{
14957cd0
A
512 unsigned activation = currentInstruction[1].u.operand;
513 unsigned arguments = currentInstruction[2].u.operand;
514 Jump activationCreated = branchTestPtr(NonZero, addressFor(activation));
515 Jump argumentsNotCreated = branchTestPtr(Zero, addressFor(arguments));
516 activationCreated.link(this);
ba379fdc 517 JITStubCall stubCall(this, cti_op_tear_off_activation);
14957cd0
A
518 stubCall.addArgument(activation, regT2);
519 stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
ba379fdc 520 stubCall.call();
14957cd0 521 argumentsNotCreated.link(this);
ba379fdc
A
522}
523
14957cd0 524void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
ba379fdc 525{
14957cd0
A
526 unsigned dst = currentInstruction[1].u.operand;
527
528 Jump argsNotCreated = branchTestPtr(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(dst))));
529 JITStubCall stubCall(this, cti_op_tear_off_arguments);
530 stubCall.addArgument(unmodifiedArgumentsRegister(dst), regT2);
531 stubCall.call();
532 argsNotCreated.link(this);
ba379fdc
A
533}
534
535void JIT::emit_op_ret(Instruction* currentInstruction)
536{
ba379fdc
A
537 ASSERT(callFrameRegister != regT1);
538 ASSERT(regT1 != returnValueRegister);
539 ASSERT(returnValueRegister != callFrameRegister);
540
541 // Return the result in %eax.
542 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
543
544 // Grab the return address.
545 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
546
547 // Restore our caller's "r".
548 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
549
550 // Return.
551 restoreReturnAddressBeforeReturn(regT1);
552 ret();
553}
554
14957cd0 555void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
ba379fdc 556{
14957cd0
A
557 ASSERT(callFrameRegister != regT1);
558 ASSERT(regT1 != returnValueRegister);
559 ASSERT(returnValueRegister != callFrameRegister);
560
561 // Return the result in %eax.
562 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
563 Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
564 loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
565 Jump notObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
566
567 // Grab the return address.
568 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
569
570 // Restore our caller's "r".
571 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
572
573 // Return.
574 restoreReturnAddressBeforeReturn(regT1);
575 ret();
576
577 // Return 'this' in %eax.
578 notJSCell.link(this);
579 notObject.link(this);
580 emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);
581
582 // Grab the return address.
583 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
584
585 // Restore our caller's "r".
586 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
587
588 // Return.
589 restoreReturnAddressBeforeReturn(regT1);
590 ret();
ba379fdc
A
591}
592
593void JIT::emit_op_resolve(Instruction* currentInstruction)
594{
595 JITStubCall stubCall(this, cti_op_resolve);
14957cd0 596 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
ba379fdc
A
597 stubCall.call(currentInstruction[1].u.operand);
598}
599
ba379fdc
A
600void JIT::emit_op_to_primitive(Instruction* currentInstruction)
601{
602 int dst = currentInstruction[1].u.operand;
603 int src = currentInstruction[2].u.operand;
604
605 emitGetVirtualRegister(src, regT0);
606
607 Jump isImm = emitJumpIfNotJSCell(regT0);
14957cd0 608 addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
ba379fdc
A
609 isImm.link(this);
610
611 if (dst != src)
612 emitPutVirtualRegister(dst);
613
614}
615
616void JIT::emit_op_strcat(Instruction* currentInstruction)
617{
618 JITStubCall stubCall(this, cti_op_strcat);
619 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
620 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
621 stubCall.call(currentInstruction[1].u.operand);
622}
623
ba379fdc
A
624void JIT::emit_op_resolve_base(Instruction* currentInstruction)
625{
14957cd0
A
626 JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
627 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
628 stubCall.call(currentInstruction[1].u.operand);
629}
630
631void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
632{
633 JITStubCall stubCall(this, cti_op_ensure_property_exists);
634 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
635 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
ba379fdc
A
636 stubCall.call(currentInstruction[1].u.operand);
637}
638
639void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
640{
641 JITStubCall stubCall(this, cti_op_resolve_skip);
14957cd0
A
642 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
643 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
ba379fdc
A
644 stubCall.call(currentInstruction[1].u.operand);
645}
646
4e4e5a6f 647void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
ba379fdc
A
648{
649 // Fast case
14957cd0 650 void* globalObject = m_codeBlock->globalObject();
ba379fdc 651 unsigned currentIndex = m_globalResolveInfoIndex++;
14957cd0 652 GlobalResolveInfo* resolveInfoAddress = &(m_codeBlock->globalResolveInfo(currentIndex));
ba379fdc
A
653
654 // Check Structure of global object
14957cd0
A
655 move(TrustedImmPtr(globalObject), regT0);
656 move(TrustedImmPtr(resolveInfoAddress), regT2);
657 loadPtr(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
658 addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset()))); // Structures don't match
ba379fdc
A
659
660 // Load cached property
661 // Assume that the global object always uses external storage.
14957cd0
A
662 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT0);
663 load32(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT1);
ba379fdc
A
664 loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
665 emitPutVirtualRegister(currentInstruction[1].u.operand);
4e4e5a6f 666}
ba379fdc 667
4e4e5a6f
A
668void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
669{
670 unsigned dst = currentInstruction[1].u.operand;
14957cd0 671 Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
4e4e5a6f
A
672
673 unsigned currentIndex = m_globalResolveInfoIndex++;
674
675 linkSlowCase(iter);
ba379fdc 676 JITStubCall stubCall(this, cti_op_resolve_global);
14957cd0 677 stubCall.addArgument(TrustedImmPtr(ident));
ba379fdc 678 stubCall.addArgument(Imm32(currentIndex));
14957cd0 679 stubCall.addArgument(regT0);
4e4e5a6f 680 stubCall.call(dst);
ba379fdc
A
681}
682
// op_not: logical negation of a boolean. Non-boolean inputs fall to the slow case.
void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
696
// op_jfalse: branch to target when the operand is falsy. Fast paths cover
// the integer zero, false, and any non-zero immediate integer (truthy);
// any other value (doubles, strings, objects) goes to the slow case.
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    // Integer zero is false.
    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0)))), target);
    // Any other immediate integer is true - fall through without jumping.
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))), target);
    // Not an integer and not a boolean: defer to the slow case.
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}
A
711void JIT::emit_op_jeq_null(Instruction* currentInstruction)
712{
713 unsigned src = currentInstruction[1].u.operand;
714 unsigned target = currentInstruction[2].u.operand;
715
716 emitGetVirtualRegister(src, regT0);
717 Jump isImmediate = emitJumpIfNotJSCell(regT0);
718
719 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
14957cd0
A
720 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
721 addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
ba379fdc
A
722 Jump wasNotImmediate = jump();
723
724 // Now handle the immediate cases - undefined & null
725 isImmediate.link(this);
14957cd0
A
726 andPtr(TrustedImm32(~TagBitUndefined), regT0);
727 addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);
ba379fdc
A
728
729 wasNotImmediate.link(this);
ba379fdc
A
730};
// op_jneq_null: branch to target when the operand is NOT equal to null -
// the exact inverse of emit_op_jeq_null above.
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}
751
// op_jneq_ptr: branch to target unless the operand is exactly the given cell
// (pointer identity, no conversions).
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue(ptr)))), target);
}
761
// op_jsr: jump-to-subroutine. Stores a (patched-later) return address into the
// retAddrDst register slot, then jumps to target; op_sret jumps back through it.
void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    // The 0 placeholder is patched with the label recorded below via m_jsrSites.
    DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
    killLastResultRegister();
}
771
// op_sret: return from a jsr subroutine by jumping through the stored address.
void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    killLastResultRegister();
}
777
// op_eq fast path: both operands must be immediate integers; anything else
// takes the slow case. Result is a tagged boolean.
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
786
// op_bitnot fast path: bitwise NOT of an immediate integer, re-tagged as an
// integer JSValue. Non-integers take the slow case.
void JIT::emit_op_bitnot(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitJumpSlowCaseIfNotImmediateInteger(regT0);
    not32(regT0);
    emitFastArithIntToImmNoCheck(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
795
// op_resolve_with_base: always goes through the stub; resolves an identifier
// and produces both its base object and its value.
void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}
803
// op_new_func_exp: materialize a function expression via the stub.
void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
810
// op_jtrue: branch to target when the operand is truthy - mirror image of
// emit_op_jfalse above; same fast paths (int zero, non-zero int, booleans).
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    // Integer zero is false - skip the jump.
    Jump isZero = branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0))));
    // Any other immediate integer is true.
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))), target);
    // Not an integer and not a boolean: defer to the slow case.
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}
824
// op_neq fast path: integer-only inequality, tagged boolean result.
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);

}
835
// op_bitxor fast path: XOR of two immediate integers. XOR clears the tag bits,
// so the result must be re-tagged before being stored.
void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
844
// op_bitor fast path: OR of two immediate integers. Unlike XOR, OR preserves
// the tag bits, so no re-tagging is needed.
void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
852
// op_throw: hand the exception value to the stub; control never returns here.
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // this point in the code should never be reached.
    breakpoint();
#endif
}
A
// op_get_pnames: set up for-in iteration. Produces a JSPropertyNameIterator in
// dst and initializes the loop counter (i) and bound (size) registers; jumps
// to breakTarget when the base is null/undefined (nothing to enumerate).
void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    // In sloppy mode, |this| is already known to be an object, so the type
    // check can be skipped when base is the this register.
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    // Initialize i to (tagged) zero and size to the string count.
    storePtr(tagTypeNumberRegister, payloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    // Masking the undefined tag bit folds undefined and null together; for
    // either, break out of the loop entirely.
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    // Other non-objects: convert to an object, then retry the stub call.
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}
908
// op_next_pname: one iteration of for-in. Loads the i'th cached property name
// into dst and jumps to target (loop body) if valid; falls through when the
// iteration is exhausted. The inline path validates that the base object's
// structure and prototype chain still match the iterator's cached snapshot;
// otherwise it asks the object via cti_has_property.
void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    // i == size => iteration finished.
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    // Empty chain vector => nothing more to check; key is valid.
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    // Walk the prototype chain, comparing each prototype's structure against
    // the iterator's cached chain entry.
    Label checkPrototype(this);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    // Invalid key: advance to the next one by restarting the loop.
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}
974
// op_push_scope: push an object onto the scope chain via the stub.
void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}
981
// op_pop_scope: pop the innermost scope chain node via the stub.
void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}
986
// Shared fast path for op_stricteq / op_nstricteq. Only immediate non-number
// values (booleans, null, undefined) are handled inline - for those, strict
// equality is just bit equality. Cells and numbers take the slow case.
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));
    addSlowCase(emitJumpIfImmediateNumber(regT2));

    if (type == OpStrictEq)
        compare32(Equal, regT1, regT0, regT0);
    else
        compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}
1009
// op_stricteq: delegate to the shared strict-equality fast path.
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}
1014
// op_nstricteq: delegate to the shared strict-equality fast path, negated.
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
1019
// op_to_jsnumber fast path: immediate integers and number cells pass through
// unchanged; everything else (needing real conversion) takes the slow case.
void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
1035
// op_push_new_scope: push a new scope binding an identifier to a value
// (used for catch-style scopes) via the stub.
void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}
1043
// op_catch: landing pad for exceptions. On entry regT0 holds the call frame;
// fetch the pending exception from the global data, clear it, and store it in
// the catch variable.
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, globalData) / sizeof(void*));
    loadPtr(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)), regT0);
    // Clear the exception slot so it is not re-thrown.
    storePtr(TrustedImmPtr(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
1053
// op_jmp_scopes: pop the requested number of scope-chain nodes, then jump.
void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}
1061
// op_switch_imm: switch on an immediate integer scrutinee. The stub picks the
// destination from the jump table; we then jump indirectly through regT0.
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
1079
// op_switch_char: switch on a single-character string scrutinee; same
// table-plus-indirect-jump scheme as op_switch_imm.
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
1097
// op_switch_string: switch on a string scrutinee via a StringJumpTable.
void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
1114
14957cd0 1115void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
ba379fdc 1116{
14957cd0
A
1117 JITStubCall stubCall(this, cti_op_throw_reference_error);
1118 stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
1119 stubCall.call();
ba379fdc
A
1120}
1121
// op_debug: debugger hook. Either a hardware breakpoint (when built with
// DEBUG_WITH_BREAKPOINT) or a stub call carrying the three debug operands.
void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}
1135
// op_eq_null: dst = (src1 == null), counting undefined and objects that
// masquerade as undefined as equal to null. Fully inline - no slow case.
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // Cell case: result is the MasqueradesAsUndefined flag.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    test8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate case: fold undefined into null, then compare.
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    comparePtr(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);

}
1160
// op_neq_null: dst = (src1 != null) - exact inverse of emit_op_eq_null.
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // Cell case: result is the inverse of the MasqueradesAsUndefined flag.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    test8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate case: fold undefined into null, then compare.
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    comparePtr(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
1184
// op_enter: function prologue.
void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);

}
1195
// op_create_activation: lazily create the activation object. Skips the stub
// call if the dst slot is already non-null (activation already created).
void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    emitPutVirtualRegister(dst);
    activationCreated.link(this);
}
1205
// op_create_arguments: lazily create the arguments object. Skips creation if
// the dst slot is already populated; stores it in both the named and the
// unmodified arguments registers.
void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    // No-params variant avoids copying an empty parameter list.
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}
14957cd0
A
1219
1220void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
ba379fdc 1221{
14957cd0
A
1222 unsigned dst = currentInstruction[1].u.operand;
1223
1224 storePtr(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * dst));
ba379fdc
A
1225}
1226
// op_convert_this (sloppy mode): fast path passes |this| through when it is a
// cell that does not need conversion; otherwise the slow case converts it.
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    addSlowCase(branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
}
ba379fdc 1235
// op_convert_this (strict mode): |this| is not coerced to an object. An empty
// value becomes null; objects/immediates pass through; only cells flagged
// NeedsThisConversion take the slow case.
void JIT::emit_op_convert_this_strict(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    // Empty (all-zero) value => substitute null.
    Jump notNull = branchTestPtr(NonZero, regT0);
    move(TrustedImmPtr(JSValue::encode(jsNull())), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
    Jump setThis = jump();
    notNull.link(this);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump notAnObject = branch8(NotEqual, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
    addSlowCase(branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
    isImmediate.link(this);
    notAnObject.link(this);
    setThis.link(this);
}
1252
// op_get_callee: load the callee from the call frame header.
void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
    emitPutVirtualRegister(result);
}
1259
// op_create_this: construct the new |this| object for a constructor call
// via the stub.
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(currentInstruction[2].u.operand, regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
1266
// op_profile_will_call: notify the profiler before a call, but only when a
// profiler is installed (checked by loading the enabled-profiler reference).
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);

}
1278
// op_profile_did_call: notify the profiler after a call; mirrors
// emit_op_profile_will_call.
void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}
1289
1290
1291// Slow cases
1292
// Slow path for op_convert_this: two slow cases (not-a-cell, needs-conversion)
// both funnel into the conversion stub.
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
1301
// Slow path for strict-mode op_convert_this: single slow case
// (cell flagged NeedsThisConversion).
void JIT::emitSlow_op_convert_this_strict(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this_strict);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
1309
// Slow path for op_to_primitive: convert via the stub.
void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
1318
// Slow path for op_loop_if_lesseq: the operand count of slow cases differs
// depending on whether op2 was a constant integer (one check emitted) or a
// register (two checks), so the two branches link a different number of cases.
void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(currentInstruction[2].u.operand, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    }
}
1340
// Slow path for op_put_by_val: links the four fast-path checks (int property,
// cell base, array base, in-vector index) and writes via the generic stub.
void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}
1358
// Slow path for op_not: undo the fast path's ValueFalse XOR (which was
// applied before the slow-case branch) to recover the original operand,
// then negate via the stub.
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
1367
// Slow path for op_jfalse: evaluate truthiness via cti_op_jtrue and branch on
// the INVERTED result (jump when the stub reports false).
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}
1376
// Slow path for op_bitnot: non-integer operand; convert and negate via stub.
void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
1384
// Slow path for op_jtrue: evaluate truthiness via the stub and branch when
// the result is non-zero (true).
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}
1393
// Slow path for op_bitxor: non-integer operand(s); compute via the stub.
void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
1402
// Slow path for op_bitor: non-integer operand(s); compute via the stub.
void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
1411
// Slow path for op_eq: full equality via the stub; result re-tagged as bool.
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
1422
// Slow path for op_neq: reuse the equality stub and flip the low bit of the
// boolean result before tagging.
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
1434
// Slow path for op_stricteq: two slow cases (cell operands, number operands)
// both handled by the stub.
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
1444
// Slow path for op_nstricteq: mirrors emitSlow_op_stricteq with the negated stub.
void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
1454
// Slow path for op_check_has_instance: baseVal was not a cell, or lacked the
// ImplementsHasInstance capability; the stub performs the check (and throws
// a TypeError if the value cannot be used on the right of instanceof).
void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call();
}
1465
// Slow path for op_instanceof: links the fast path's cell checks on value and
// proto plus two further slow cases, then computes the result via the stub.
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}
1483
// Slow path for op_call: shared call slow-case compiler, advancing the
// per-call link-info index in step with the fast path.
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}
1488
// Slow path for op_call_eval: shared call slow-case compiler.
void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}
1493
1494void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1495{
1496 compileOpCallVarargsSlowCase(currentInstruction, iter);
1497}
1498
1499void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1500{
1501 compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
1502}
1503
1504void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1505{
1506 linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
1507 linkSlowCase(iter);
1508
1509 JITStubCall stubCall(this, cti_op_to_jsnumber);
1510 stubCall.addArgument(regT0);
1511 stubCall.call(currentInstruction[1].u.operand);
1512}
1513
14957cd0
A
// Fast path for op_get_arguments_length: when no arguments object has been
// materialized (the arguments register is still empty), the length can be
// read straight from the call frame's ArgumentCount slot.
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    // Slow case: an arguments object already exists, so its (possibly
    // user-modified) length property must be read instead.
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    // Subtract one to exclude 'this' from the count.
    sub32(TrustedImm32(1), regT0);
    // Re-tag the raw int32 as a JS immediate before storing it.
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}
1524
1525void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1526{
1527 linkSlowCase(iter);
1528 unsigned dst = currentInstruction[1].u.operand;
1529 unsigned base = currentInstruction[2].u.operand;
1530 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
1531
1532 emitGetVirtualRegister(base, regT0);
1533 JITStubCall stubCall(this, cti_op_get_by_id_generic);
1534 stubCall.addArgument(regT0);
1535 stubCall.addArgument(TrustedImmPtr(ident));
1536 stubCall.call(dst);
1537}
1538
// Fast path for op_get_argument_by_val: reads arguments[i] directly out of
// the register file while no arguments object has been materialized.
// Arguments may live in two places: the first m_numParameters-1 are "in
// place" just below the call frame header; any extras are out-of-line,
// further down by the full argument count.
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    // Slow case 1: an arguments object already exists.
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    // Slow case 2: the index is not an immediate integer.
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT2);
    // Slow case 3: index is out of range of the actual argument count.
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    Jump skipOutofLineParams;
    int numArgs = m_codeBlock->m_numParameters;
    if (numArgs) {
        // Index within the declared parameter count: load from the in-place
        // slots directly below the call frame header.
        Jump notInInPlaceArgs = branch32(AboveOrEqual, regT1, Imm32(numArgs));
        addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
        loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
        skipOutofLineParams = jump();
        notInInPlaceArgs.link(this);
    }

    // Out-of-line case: rebase by the actual argument count (regT2) to reach
    // the copied argument area, then index into it.
    addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
    mul32(TrustedImm32(sizeof(Register)), regT2, regT2);
    subPtr(regT2, regT0);
    loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
    if (numArgs)
        skipOutofLineParams.link(this);
    emitPutVirtualRegister(dst, regT0);
}
1570
// Slow path for op_get_argument_by_val. Slow case 1 (arguments object
// already exists) jumps straight to a generic get_by_val; slow cases 2 and 3
// (non-integer or out-of-range index) first materialize the arguments object
// and then fall through to the same get_by_val.
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    // Slow case 1: arguments object already created — skip creation below.
    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    // Slow cases 2 and 3: create the arguments object now.
    linkSlowCase(iter);
    linkSlowCase(iter);
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    // Store the new object in both the mangled and unmodified registers.
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}
1595
1596#endif // USE(JSVALUE64)
4e4e5a6f
A
1597
// Fast path for op_resolve_global_dynamic: walk 'skip' nodes up the scope
// chain, adding a slow case for any node whose object turns out to be an
// activation (structure check), then fall into the normal cached global
// resolve. Each skipped node contributes exactly one slow case; the slow
// path below relies on that count.
void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
{
    int skip = currentInstruction[5].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);

    // Function code with a full scope chain may have a lazily-created
    // activation at the top; only check it if it has been created.
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        // Slow case: this scope object is an activation — dynamic lookup needed.
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
    }
    // All skipped nodes were safe; do the (cached) global resolve.
    emit_op_resolve_global(currentInstruction, true);
}
1622
// Slow path for op_resolve_global_dynamic. One slow case was queued per
// skipped scope node (activation detected): those all funnel into a full
// dynamic cti_op_resolve. The final slow case is the global-cache miss from
// emit_op_resolve_global, handled by cti_op_resolve_global.
void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
    int skip = currentInstruction[5].u.operand;
    // Link every per-node activation check to the full dynamic resolve.
    while (skip--)
        linkSlowCase(iter);
    JITStubCall resolveStubCall(this, cti_op_resolve);
    resolveStubCall.addArgument(TrustedImmPtr(ident));
    resolveStubCall.call(dst);
    // Jump back to the instruction after op_resolve_global_dynamic.
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));

    // Consume this opcode's global resolve info slot.
    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.call(dst);
}
1644
1645void JIT::emit_op_new_regexp(Instruction* currentInstruction)
1646{
1647 JITStubCall stubCall(this, cti_op_new_regexp);
14957cd0 1648 stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
4e4e5a6f
A
1649 stubCall.call(currentInstruction[1].u.operand);
1650}
1651
14957cd0 1652void JIT::emit_op_load_varargs(Instruction* currentInstruction)
4e4e5a6f 1653{
14957cd0
A
1654 int argCountDst = currentInstruction[1].u.operand;
1655 int argsOffset = currentInstruction[2].u.operand;
1656 int registerOffset = currentInstruction[3].u.operand;
1657 ASSERT(argsOffset <= registerOffset);
1658
1659 int expectedParams = m_codeBlock->m_numParameters - 1;
1660 // Don't do inline copying if we aren't guaranteed to have a single stream
1661 // of arguments
1662 if (expectedParams) {
1663 JITStubCall stubCall(this, cti_op_load_varargs);
1664 stubCall.addArgument(Imm32(argsOffset));
1665 stubCall.call();
1666 // Stores a naked int32 in the register file.
1667 store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
1668 return;
1669 }
1670
4e4e5a6f 1671#if USE(JSVALUE32_64)
14957cd0 1672 addSlowCase(branch32(NotEqual, tagFor(argsOffset), TrustedImm32(JSValue::EmptyValueTag)));
4e4e5a6f 1673#else
14957cd0 1674 addSlowCase(branchTestPtr(NonZero, addressFor(argsOffset)));
4e4e5a6f 1675#endif
14957cd0
A
1676 // Load arg count into regT0
1677 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1678 store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
1679 store32(regT0, intPayloadFor(argCountDst));
1680 Jump endBranch = branch32(Equal, regT0, TrustedImm32(1));
1681
1682 mul32(TrustedImm32(sizeof(Register)), regT0, regT3);
1683 addPtr(TrustedImm32(static_cast<unsigned>(sizeof(Register) - RegisterFile::CallFrameHeaderSize * sizeof(Register))), callFrameRegister, regT1);
1684 subPtr(regT3, regT1); // regT1 is now the start of the out of line arguments
1685 addPtr(Imm32(argsOffset * sizeof(Register)), callFrameRegister, regT2); // regT2 is the target buffer
4e4e5a6f 1686
14957cd0
A
1687 // Bounds check the registerfile
1688 addPtr(regT2, regT3);
1689 addPtr(Imm32((registerOffset - argsOffset) * sizeof(Register)), regT3);
1690 addSlowCase(branchPtr(Below, AbsoluteAddress(m_globalData->interpreter->registerFile().addressOfEnd()), regT3));
1691
1692 sub32(TrustedImm32(1), regT0);
1693 Label loopStart = label();
1694 loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(0 - 2 * sizeof(Register))), regT3);
1695 storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(0 - sizeof(Register))));
1696#if USE(JSVALUE32_64)
1697 loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - 2 * sizeof(Register))), regT3);
1698 storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - sizeof(Register))));
1699#endif
1700 branchSubPtr(NonZero, TrustedImm32(1), regT0).linkTo(loopStart, this);
1701 endBranch.link(this);
1702}
4e4e5a6f 1703
14957cd0
A
// Slow path for op_load_varargs. Only relevant when the fast path emitted
// the inline copy (expectedParams == 0); otherwise the fast path already
// called the stub and queued no slow cases, so there is nothing to link.
void JIT::emitSlow_op_load_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;
    int expectedParams = m_codeBlock->m_numParameters - 1;
    if (expectedParams)
        return;

    // Two slow cases: arguments object exists, and register-file overflow.
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();

    // Record the argument count the stub returned (as a tagged int32).
    store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
    store32(returnValueRegister, intPayloadFor(argCountDst));
}
ba379fdc 1721
14957cd0
A
1722void JIT::emit_op_new_func(Instruction* currentInstruction)
1723{
1724 Jump lazyJump;
1725 int dst = currentInstruction[1].u.operand;
1726 if (currentInstruction[3].u.operand) {
4e4e5a6f 1727#if USE(JSVALUE32_64)
14957cd0 1728 lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
4e4e5a6f 1729#else
14957cd0 1730 lazyJump = branchTestPtr(NonZero, addressFor(dst));
4e4e5a6f 1731#endif
14957cd0
A
1732 }
1733 JITStubCall stubCall(this, cti_op_new_func);
1734 stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
1735 stubCall.call(currentInstruction[1].u.operand);
1736 if (currentInstruction[3].u.operand)
1737 lazyJump.link(this);
4e4e5a6f 1738}
14957cd0
A
1739
1740void JIT::emit_op_new_array(Instruction* currentInstruction)
1741{
1742 JITStubCall stubCall(this, cti_op_new_array);
1743 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
1744 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
1745 stubCall.call(currentInstruction[1].u.operand);
1746}
1747
1748void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
1749{
1750 JITStubCall stubCall(this, cti_op_new_array_buffer);
1751 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
1752 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
1753 stubCall.call(currentInstruction[1].u.operand);
1754}
1755
ba379fdc
A
1756} // namespace JSC
1757
1758#endif // ENABLE(JIT)