]> git.saurik.com Git - apple/javascriptcore.git/blame - jit/JITOpcodes32_64.cpp
JavaScriptCore-903.tar.gz
[apple/javascriptcore.git] / jit / JITOpcodes32_64.cpp
CommitLineData
4e4e5a6f
A
1/*
2 * Copyright (C) 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 *
14 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
15 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
18 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
19 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
22 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
24 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25 */
26
27#include "config.h"
4e4e5a6f 28
14957cd0
A
29#if ENABLE(JIT)
30#if USE(JSVALUE32_64)
31#include "JIT.h"
4e4e5a6f
A
32
33#include "JITInlineMethods.h"
34#include "JITStubCall.h"
35#include "JSArray.h"
36#include "JSCell.h"
37#include "JSFunction.h"
38#include "JSPropertyNameIterator.h"
39#include "LinkBuffer.h"
40
41namespace JSC {
42
// Builds the shared "CTI machine trampolines": hand-emitted stubs (soft modulo,
// string-length fast path, call-link / virtual-call stubs, and the native call
// thunks) that all JIT-compiled code funnels through. The assembled code is
// copied into *executablePool and the entry points recorded in *trampolines.
void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure *trampolines)
{
#if ENABLE(JIT_USE_SOFT_MODULO)
    Label softModBegin = align();
    softModulo();
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag

    // Bail to the slow case unless the operand is a cell whose vptr identifies a JSString.
    Jump string_failureCases1 = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the Ustring.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);

    // A length above INT_MAX cannot be boxed as an int32; take the slow path.
    Jump string_failureCases3 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);

    ret();
#endif

    JumpList callLinkFailures;
    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
#if ENABLE(JIT_OPTIMIZE_CALL)
    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    // A null result from the lazy linker signals compilation failure; fall through
    // to the shared failure handler below.
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualConstructLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

#endif // ENABLE(JIT_OPTIMIZE_CALL)

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // A non-negative m_numParametersForCall means the executable already has code;
    // otherwise call out to compile it first.
    Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileCconstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock4.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    // If the parser fails we want to be able to be able to keep going,
    // So we handle this as a parse failure.
    callLinkFailures.link(this);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT1);
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, m_globalData->executableAllocator);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
#endif
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileCconstruct, FunctionPtr(cti_op_construct_jitCompile));

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
#if ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
#endif
#if ENABLE(JIT_USE_SOFT_MODULO)
    trampolines->ctiSoftModulo = patchBuffer.trampolineAt(softModBegin);
#endif
}
196
// Emits the thunk that calls a host (native C) function on behalf of JIT code.
// isConstruct selects the NativeExecutable slot to call through (m_constructor
// vs m_function). Emission is appended to the current assembler buffer; the
// returned Label marks the thunk's entry point. Each CPU section adapts the
// call frame to that platform's C calling convention.
JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    // Native frames have no CodeBlock.
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Record the return address (currently on the stack) in the call frame header.
    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT1);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT1, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    call(Address(regT2, executableOffsetToFunction), regT0);
    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif // CPU(X86)

    // Check for an exception: an empty exception tag means no exception is pending.
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    // Record where the exception was raised, then divert to the throw trampoline.
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}
4e4e5a6f 332
14957cd0
A
// Compiles a standalone thunk for a specific host function `func` into the
// supplied executable pool and returns a CodePtr to it. Unlike the Label
// overload above, the target is a fixed NativeFunction linked in directly
// (nativeCall) rather than being loaded from the NativeExecutable at runtime.
JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool> executablePool, JSGlobalData* globalData, NativeFunction func)
{
    Call nativeCall;
    Label nativeCallThunk = align();

    // Native frames have no CodeBlock.
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Record the return address (currently on the stack) in the call frame header.
    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);
#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif // CPU(X86)

    // Check for an exception: an empty exception tag means no exception is pending.
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    // Record where the exception was raised, then divert to the throw trampoline.
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, executablePool);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    patchBuffer.finalizeCode();

    return patchBuffer.trampolineAt(nativeCallThunk);
}
476
477void JIT::emit_op_mov(Instruction* currentInstruction)
478{
479 unsigned dst = currentInstruction[1].u.operand;
480 unsigned src = currentInstruction[2].u.operand;
481
482 if (m_codeBlock->isConstantRegisterIndex(src))
483 emitStore(dst, getConstantOperand(src));
484 else {
485 emitLoad(src, regT1, regT0);
486 emitStore(dst, regT1, regT0);
14957cd0 487 map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
4e4e5a6f
A
488 }
489}
490
// op_end: terminate execution of the program, returning the operand's value.
void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    // Load the result value into (regT1 tag, regT0 payload).
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    // Fetch the return address out of the call frame header and return through it.
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}
498
499void JIT::emit_op_jmp(Instruction* currentInstruction)
500{
501 unsigned target = currentInstruction[1].u.operand;
502 addJump(jump(), target);
503}
504
// op_loop_if_lesseq: jump to `target` when op1 <= op2 (int32 fast path only).
// Non-int32 operands fall through to the slow case; emitSlow_op_loop_if_lesseq
// must link exactly the slow cases added here, in the same order.
void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    // Loop edges check for interpreter timeout.
    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
        // Comparison is reversed: op2 >= const(op1)  <=>  op1 <= op2.
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
        return;
    }

    // General case: both operands loaded; two int32 tag checks, then compare payloads.
    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target);
}
532
// Slow path for op_loop_if_lesseq: links the int32 tag-check failures emitted
// by the fast path (two when neither operand is constant, otherwise one) and
// falls back to the cti_op_loop_if_lesseq stub, branching on its boolean result.
void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    // Stub returns non-zero when the comparison holds; jump back to the hot path target.
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}
549
550void JIT::emit_op_new_object(Instruction* currentInstruction)
551{
552 JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
553}
554
14957cd0
A
// op_check_has_instance: verify that baseVal is a cell whose structure has the
// ImplementsHasInstance flag; either failure is routed to the slow case.
void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}
568
4e4e5a6f
A
// op_instanceof: dst = value instanceof baseVal (with explicit proto operand).
// Fast path walks value's prototype chain looking for proto; bails to the slow
// case when value/proto aren't cells, proto isn't an object, or baseVal doesn't
// implement default HasInstance.
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(branch8(NotEqual, Address(regT3, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));

    // Fixme: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}
615
14957cd0
A
// Slow path for op_check_has_instance: links the not-a-cell and the
// ImplementsHasInstance failures (in emission order) and defers to the stub,
// which will throw the appropriate TypeError.
void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal);
    stubCall.call();
}
627
4e4e5a6f
A
// Slow path for op_instanceof: links the four slow cases added by the fast path
// (value not a cell, proto not a cell, proto not an object, baseVal without
// default HasInstance) in the same order, then computes the result via the stub.
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}
646
4e4e5a6f
A
// op_get_global_var: read slot `index` of the global object's register array
// into dst.
void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;

    // Base pointer of the global register storage.
    loadPtr(&globalObject->m_registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    // Remember that (tag, payload) for dst live in regT1/regT0 after this opcode.
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}
660
// op_put_global_var: write `value` into slot `index` of the global object's
// register array.
void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);

    // Base pointer of the global register storage.
    loadPtr(&globalObject->m_registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    // Remember that (tag, payload) for `value` live in regT1/regT0 after this opcode.
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}
674
// op_get_scoped_var: read slot `index` of the variable object found `skip`
// links up the scope chain into dst. When the code block needs a full scope
// chain, the first hop is conditional on the activation having been created.
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    // First hop is skipped at runtime if the activation was never created
    // (its register still holds the empty value tag).
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    // Walk the remaining scope chain links.
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    // Remember that (tag, payload) for dst live in regT1/regT0 after this opcode.
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}
701
// op_put_scoped_var: write `value` into slot `index` of the variable object
// found `skip` links up the scope chain. Mirrors emit_op_get_scoped_var,
// including the conditional first hop when an activation may not exist yet.
void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    // First hop is skipped at runtime if the activation was never created
    // (its register still holds the empty value tag).
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    // Walk the remaining scope chain links.
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    // Remember that (tag, payload) for `value` live in regT1/regT0 after this opcode.
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}
729
730void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
731{
14957cd0
A
732 unsigned activation = currentInstruction[1].u.operand;
733 unsigned arguments = currentInstruction[2].u.operand;
734 Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
735 Jump argumentsNotCreated = branch32(Equal, tagFor(arguments), TrustedImm32(JSValue::EmptyValueTag));
736 activationCreated.link(this);
4e4e5a6f
A
737 JITStubCall stubCall(this, cti_op_tear_off_activation);
738 stubCall.addArgument(currentInstruction[1].u.operand);
14957cd0 739 stubCall.addArgument(unmodifiedArgumentsRegister(currentInstruction[2].u.operand));
4e4e5a6f 740 stubCall.call();
14957cd0 741 argumentsNotCreated.link(this);
4e4e5a6f
A
742}
743
14957cd0 744void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
4e4e5a6f 745{
14957cd0 746 int dst = currentInstruction[1].u.operand;
4e4e5a6f 747
14957cd0
A
748 Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(dst)), TrustedImm32(JSValue::EmptyValueTag));
749 JITStubCall stubCall(this, cti_op_tear_off_arguments);
750 stubCall.addArgument(unmodifiedArgumentsRegister(dst));
751 stubCall.call();
752 argsNotCreated.link(this);
4e4e5a6f
A
753}
754
755void JIT::emit_op_resolve(Instruction* currentInstruction)
756{
757 JITStubCall stubCall(this, cti_op_resolve);
14957cd0 758 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
4e4e5a6f
A
759 stubCall.call(currentInstruction[1].u.operand);
760}
761
762void JIT::emit_op_to_primitive(Instruction* currentInstruction)
763{
764 int dst = currentInstruction[1].u.operand;
765 int src = currentInstruction[2].u.operand;
766
767 emitLoad(src, regT1, regT0);
768
14957cd0
A
769 Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
770 addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
4e4e5a6f
A
771 isImm.link(this);
772
773 if (dst != src)
774 emitStore(dst, regT1, regT0);
14957cd0 775 map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
4e4e5a6f
A
776}
777
778void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
779{
780 int dst = currentInstruction[1].u.operand;
781
782 linkSlowCase(iter);
783
784 JITStubCall stubCall(this, cti_op_to_primitive);
785 stubCall.addArgument(regT1, regT0);
786 stubCall.call(dst);
787}
788
789void JIT::emit_op_strcat(Instruction* currentInstruction)
790{
791 JITStubCall stubCall(this, cti_op_strcat);
792 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
793 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
794 stubCall.call(currentInstruction[1].u.operand);
795}
796
797void JIT::emit_op_resolve_base(Instruction* currentInstruction)
798{
14957cd0
A
799 JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
800 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
801 stubCall.call(currentInstruction[1].u.operand);
802}
803
804void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
805{
806 JITStubCall stubCall(this, cti_op_ensure_property_exists);
807 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
808 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
4e4e5a6f
A
809 stubCall.call(currentInstruction[1].u.operand);
810}
811
812void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
813{
814 JITStubCall stubCall(this, cti_op_resolve_skip);
14957cd0
A
815 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
816 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
4e4e5a6f
A
817 stubCall.call(currentInstruction[1].u.operand);
818}
819
820void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
821{
822 // FIXME: Optimize to use patching instead of so many memory accesses.
823
824 unsigned dst = currentInstruction[1].u.operand;
14957cd0 825 void* globalObject = m_codeBlock->globalObject();
4e4e5a6f
A
826
827 unsigned currentIndex = m_globalResolveInfoIndex++;
14957cd0
A
828 GlobalResolveInfo* resolveInfoAddress = &m_codeBlock->globalResolveInfo(currentIndex);
829
4e4e5a6f
A
830
831 // Verify structure.
14957cd0
A
832 move(TrustedImmPtr(globalObject), regT0);
833 move(TrustedImmPtr(resolveInfoAddress), regT3);
834 loadPtr(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
835 addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset())));
4e4e5a6f
A
836
837 // Load property.
14957cd0
A
838 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT2);
839 load32(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT3);
840 load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
841 load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
4e4e5a6f 842 emitStore(dst, regT1, regT0);
14957cd0 843 map(m_bytecodeOffset + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
4e4e5a6f
A
844}
845
846void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
847{
848 unsigned dst = currentInstruction[1].u.operand;
14957cd0 849 Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
4e4e5a6f
A
850
851 unsigned currentIndex = m_globalResolveInfoIndex++;
852
853 linkSlowCase(iter);
854 JITStubCall stubCall(this, cti_op_resolve_global);
14957cd0 855 stubCall.addArgument(TrustedImmPtr(ident));
4e4e5a6f
A
856 stubCall.addArgument(Imm32(currentIndex));
857 stubCall.call(dst);
858}
859
860void JIT::emit_op_not(Instruction* currentInstruction)
861{
862 unsigned dst = currentInstruction[1].u.operand;
863 unsigned src = currentInstruction[2].u.operand;
864
865 emitLoadTag(src, regT0);
866
14957cd0
A
867 emitLoad(src, regT1, regT0);
868 addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
869 xor32(TrustedImm32(1), regT0);
4e4e5a6f
A
870
871 emitStoreBool(dst, regT0, (dst == src));
872}
873
874void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
875{
876 unsigned dst = currentInstruction[1].u.operand;
877 unsigned src = currentInstruction[2].u.operand;
878
879 linkSlowCase(iter);
880
881 JITStubCall stubCall(this, cti_op_not);
882 stubCall.addArgument(src);
883 stubCall.call(dst);
884}
885
886void JIT::emit_op_jfalse(Instruction* currentInstruction)
887{
888 unsigned cond = currentInstruction[1].u.operand;
889 unsigned target = currentInstruction[2].u.operand;
890
891 emitLoad(cond, regT1, regT0);
892
14957cd0
A
893 ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
894 addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
895 addJump(branchTest32(Zero, regT0), target);
4e4e5a6f
A
896}
897
898void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
899{
900 unsigned cond = currentInstruction[1].u.operand;
901 unsigned target = currentInstruction[2].u.operand;
902
903 linkSlowCase(iter);
14957cd0
A
904
905 if (supportsFloatingPoint()) {
906 // regT1 contains the tag from the hot path.
907 Jump notNumber = branch32(Above, regT1, Imm32(JSValue::LowestTag));
908
909 emitLoadDouble(cond, fpRegT0);
910 emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
911 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));
912
913 notNumber.link(this);
914 }
915
4e4e5a6f
A
916 JITStubCall stubCall(this, cti_op_jtrue);
917 stubCall.addArgument(cond);
918 stubCall.call();
919 emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
920}
921
922void JIT::emit_op_jtrue(Instruction* currentInstruction)
923{
924 unsigned cond = currentInstruction[1].u.operand;
925 unsigned target = currentInstruction[2].u.operand;
926
927 emitLoad(cond, regT1, regT0);
928
14957cd0
A
929 ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
930 addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
931 addJump(branchTest32(NonZero, regT0), target);
4e4e5a6f
A
932}
933
934void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
935{
936 unsigned cond = currentInstruction[1].u.operand;
937 unsigned target = currentInstruction[2].u.operand;
938
939 linkSlowCase(iter);
14957cd0
A
940
941 if (supportsFloatingPoint()) {
942 // regT1 contains the tag from the hot path.
943 Jump notNumber = branch32(Above, regT1, Imm32(JSValue::LowestTag));
944
945 emitLoadDouble(cond, fpRegT0);
946 emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
947 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));
948
949 notNumber.link(this);
950 }
951
4e4e5a6f
A
952 JITStubCall stubCall(this, cti_op_jtrue);
953 stubCall.addArgument(cond);
954 stubCall.call();
955 emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
956}
957
958void JIT::emit_op_jeq_null(Instruction* currentInstruction)
959{
960 unsigned src = currentInstruction[1].u.operand;
961 unsigned target = currentInstruction[2].u.operand;
962
963 emitLoad(src, regT1, regT0);
964
14957cd0 965 Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
4e4e5a6f
A
966
967 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
14957cd0
A
968 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
969 addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
4e4e5a6f
A
970
971 Jump wasNotImmediate = jump();
972
973 // Now handle the immediate cases - undefined & null
974 isImmediate.link(this);
975
14957cd0
A
976 ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
977 or32(TrustedImm32(1), regT1);
978 addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);
4e4e5a6f
A
979
980 wasNotImmediate.link(this);
981}
982
983void JIT::emit_op_jneq_null(Instruction* currentInstruction)
984{
985 unsigned src = currentInstruction[1].u.operand;
986 unsigned target = currentInstruction[2].u.operand;
987
988 emitLoad(src, regT1, regT0);
989
14957cd0 990 Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
4e4e5a6f
A
991
992 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
14957cd0
A
993 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
994 addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
4e4e5a6f
A
995
996 Jump wasNotImmediate = jump();
997
998 // Now handle the immediate cases - undefined & null
999 isImmediate.link(this);
1000
14957cd0
A
1001 ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
1002 or32(TrustedImm32(1), regT1);
1003 addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);
4e4e5a6f
A
1004
1005 wasNotImmediate.link(this);
1006}
1007
1008void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
1009{
1010 unsigned src = currentInstruction[1].u.operand;
14957cd0 1011 JSCell* ptr = currentInstruction[2].u.jsCell.get();
4e4e5a6f
A
1012 unsigned target = currentInstruction[3].u.operand;
1013
1014 emitLoad(src, regT1, regT0);
14957cd0
A
1015 addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
1016 addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(ptr)), target);
4e4e5a6f
A
1017}
1018
1019void JIT::emit_op_jsr(Instruction* currentInstruction)
1020{
1021 int retAddrDst = currentInstruction[1].u.operand;
1022 int target = currentInstruction[2].u.operand;
14957cd0 1023 DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
4e4e5a6f
A
1024 addJump(jump(), target);
1025 m_jsrSites.append(JSRInfo(storeLocation, label()));
1026}
1027
1028void JIT::emit_op_sret(Instruction* currentInstruction)
1029{
1030 jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
1031}
1032
1033void JIT::emit_op_eq(Instruction* currentInstruction)
1034{
1035 unsigned dst = currentInstruction[1].u.operand;
1036 unsigned src1 = currentInstruction[2].u.operand;
1037 unsigned src2 = currentInstruction[3].u.operand;
1038
1039 emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
1040 addSlowCase(branch32(NotEqual, regT1, regT3));
14957cd0
A
1041 addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
1042 addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));
4e4e5a6f 1043
14957cd0 1044 compare32(Equal, regT0, regT2, regT0);
4e4e5a6f
A
1045
1046 emitStoreBool(dst, regT0);
1047}
1048
1049void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1050{
1051 unsigned dst = currentInstruction[1].u.operand;
1052 unsigned op1 = currentInstruction[2].u.operand;
1053 unsigned op2 = currentInstruction[3].u.operand;
1054
1055 JumpList storeResult;
1056 JumpList genericCase;
1057
1058 genericCase.append(getSlowCase(iter)); // tags not equal
1059
1060 linkSlowCase(iter); // tags equal and JSCell
14957cd0
A
1061 genericCase.append(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
1062 genericCase.append(branchPtr(NotEqual, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));
4e4e5a6f
A
1063
1064 // String case.
1065 JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
1066 stubCallEqStrings.addArgument(regT0);
1067 stubCallEqStrings.addArgument(regT2);
1068 stubCallEqStrings.call();
1069 storeResult.append(jump());
1070
1071 // Generic case.
1072 genericCase.append(getSlowCase(iter)); // doubles
1073 genericCase.link(this);
1074 JITStubCall stubCallEq(this, cti_op_eq);
1075 stubCallEq.addArgument(op1);
1076 stubCallEq.addArgument(op2);
1077 stubCallEq.call(regT0);
1078
1079 storeResult.link(this);
4e4e5a6f
A
1080 emitStoreBool(dst, regT0);
1081}
1082
1083void JIT::emit_op_neq(Instruction* currentInstruction)
1084{
1085 unsigned dst = currentInstruction[1].u.operand;
1086 unsigned src1 = currentInstruction[2].u.operand;
1087 unsigned src2 = currentInstruction[3].u.operand;
1088
1089 emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
1090 addSlowCase(branch32(NotEqual, regT1, regT3));
14957cd0
A
1091 addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
1092 addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));
4e4e5a6f 1093
14957cd0 1094 compare32(NotEqual, regT0, regT2, regT0);
4e4e5a6f
A
1095
1096 emitStoreBool(dst, regT0);
1097}
1098
1099void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1100{
1101 unsigned dst = currentInstruction[1].u.operand;
1102
1103 JumpList storeResult;
1104 JumpList genericCase;
1105
1106 genericCase.append(getSlowCase(iter)); // tags not equal
1107
1108 linkSlowCase(iter); // tags equal and JSCell
14957cd0
A
1109 genericCase.append(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
1110 genericCase.append(branchPtr(NotEqual, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));
4e4e5a6f
A
1111
1112 // String case.
1113 JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
1114 stubCallEqStrings.addArgument(regT0);
1115 stubCallEqStrings.addArgument(regT2);
1116 stubCallEqStrings.call(regT0);
1117 storeResult.append(jump());
1118
1119 // Generic case.
1120 genericCase.append(getSlowCase(iter)); // doubles
1121 genericCase.link(this);
1122 JITStubCall stubCallEq(this, cti_op_eq);
1123 stubCallEq.addArgument(regT1, regT0);
1124 stubCallEq.addArgument(regT3, regT2);
1125 stubCallEq.call(regT0);
1126
1127 storeResult.link(this);
14957cd0 1128 xor32(TrustedImm32(0x1), regT0);
4e4e5a6f
A
1129 emitStoreBool(dst, regT0);
1130}
1131
1132void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
1133{
1134 unsigned dst = currentInstruction[1].u.operand;
1135 unsigned src1 = currentInstruction[2].u.operand;
1136 unsigned src2 = currentInstruction[3].u.operand;
1137
1138 emitLoadTag(src1, regT0);
1139 emitLoadTag(src2, regT1);
1140
1141 // Jump to a slow case if either operand is double, or if both operands are
1142 // cells and/or Int32s.
1143 move(regT0, regT2);
1144 and32(regT1, regT2);
14957cd0
A
1145 addSlowCase(branch32(Below, regT2, TrustedImm32(JSValue::LowestTag)));
1146 addSlowCase(branch32(AboveOrEqual, regT2, TrustedImm32(JSValue::CellTag)));
4e4e5a6f
A
1147
1148 if (type == OpStrictEq)
14957cd0 1149 compare32(Equal, regT0, regT1, regT0);
4e4e5a6f 1150 else
14957cd0 1151 compare32(NotEqual, regT0, regT1, regT0);
4e4e5a6f
A
1152
1153 emitStoreBool(dst, regT0);
1154}
1155
1156void JIT::emit_op_stricteq(Instruction* currentInstruction)
1157{
1158 compileOpStrictEq(currentInstruction, OpStrictEq);
1159}
1160
1161void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1162{
1163 unsigned dst = currentInstruction[1].u.operand;
1164 unsigned src1 = currentInstruction[2].u.operand;
1165 unsigned src2 = currentInstruction[3].u.operand;
1166
1167 linkSlowCase(iter);
1168 linkSlowCase(iter);
1169
1170 JITStubCall stubCall(this, cti_op_stricteq);
1171 stubCall.addArgument(src1);
1172 stubCall.addArgument(src2);
1173 stubCall.call(dst);
1174}
1175
1176void JIT::emit_op_nstricteq(Instruction* currentInstruction)
1177{
1178 compileOpStrictEq(currentInstruction, OpNStrictEq);
1179}
1180
1181void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1182{
1183 unsigned dst = currentInstruction[1].u.operand;
1184 unsigned src1 = currentInstruction[2].u.operand;
1185 unsigned src2 = currentInstruction[3].u.operand;
1186
1187 linkSlowCase(iter);
1188 linkSlowCase(iter);
1189
1190 JITStubCall stubCall(this, cti_op_nstricteq);
1191 stubCall.addArgument(src1);
1192 stubCall.addArgument(src2);
1193 stubCall.call(dst);
1194}
1195
1196void JIT::emit_op_eq_null(Instruction* currentInstruction)
1197{
1198 unsigned dst = currentInstruction[1].u.operand;
1199 unsigned src = currentInstruction[2].u.operand;
1200
1201 emitLoad(src, regT1, regT0);
14957cd0 1202 Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
4e4e5a6f 1203
14957cd0
A
1204 loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
1205 test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);
4e4e5a6f
A
1206
1207 Jump wasNotImmediate = jump();
1208
1209 isImmediate.link(this);
1210
14957cd0
A
1211 compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
1212 compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
4e4e5a6f
A
1213 or32(regT2, regT1);
1214
1215 wasNotImmediate.link(this);
1216
4e4e5a6f
A
1217 emitStoreBool(dst, regT1);
1218}
1219
1220void JIT::emit_op_neq_null(Instruction* currentInstruction)
1221{
1222 unsigned dst = currentInstruction[1].u.operand;
1223 unsigned src = currentInstruction[2].u.operand;
1224
1225 emitLoad(src, regT1, regT0);
14957cd0 1226 Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
4e4e5a6f 1227
14957cd0
A
1228 loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
1229 test8(Zero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);
4e4e5a6f
A
1230
1231 Jump wasNotImmediate = jump();
1232
1233 isImmediate.link(this);
1234
14957cd0
A
1235 compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
1236 compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
4e4e5a6f
A
1237 and32(regT2, regT1);
1238
1239 wasNotImmediate.link(this);
1240
4e4e5a6f
A
1241 emitStoreBool(dst, regT1);
1242}
1243
1244void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
1245{
1246 JITStubCall stubCall(this, cti_op_resolve_with_base);
14957cd0 1247 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
4e4e5a6f
A
1248 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1249 stubCall.call(currentInstruction[2].u.operand);
1250}
1251
1252void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
1253{
1254 JITStubCall stubCall(this, cti_op_new_func_exp);
14957cd0 1255 stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
4e4e5a6f
A
1256 stubCall.call(currentInstruction[1].u.operand);
1257}
1258
1259void JIT::emit_op_throw(Instruction* currentInstruction)
1260{
1261 unsigned exception = currentInstruction[1].u.operand;
1262 JITStubCall stubCall(this, cti_op_throw);
1263 stubCall.addArgument(exception);
1264 stubCall.call();
1265
1266#ifndef NDEBUG
1267 // cti_op_throw always changes it's return address,
1268 // this point in the code should never be reached.
1269 breakpoint();
1270#endif
1271}
1272
1273void JIT::emit_op_get_pnames(Instruction* currentInstruction)
1274{
1275 int dst = currentInstruction[1].u.operand;
1276 int base = currentInstruction[2].u.operand;
1277 int i = currentInstruction[3].u.operand;
1278 int size = currentInstruction[4].u.operand;
1279 int breakTarget = currentInstruction[5].u.operand;
1280
1281 JumpList isNotObject;
1282
1283 emitLoad(base, regT1, regT0);
1284 if (!m_codeBlock->isKnownNotImmediate(base))
14957cd0
A
1285 isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
1286 if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
1287 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
1288 isNotObject.append(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));
4e4e5a6f
A
1289 }
1290
1291 // We could inline the case where you have a valid cache, but
1292 // this call doesn't seem to be hot.
1293 Label isObject(this);
1294 JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
1295 getPnamesStubCall.addArgument(regT0);
1296 getPnamesStubCall.call(dst);
1297 load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
14957cd0
A
1298 store32(TrustedImm32(Int32Tag), intTagFor(i));
1299 store32(TrustedImm32(0), intPayloadFor(i));
1300 store32(TrustedImm32(Int32Tag), intTagFor(size));
1301 store32(regT3, payloadFor(size));
4e4e5a6f
A
1302 Jump end = jump();
1303
1304 isNotObject.link(this);
14957cd0
A
1305 addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
1306 addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
4e4e5a6f
A
1307 JITStubCall toObjectStubCall(this, cti_to_object);
1308 toObjectStubCall.addArgument(regT1, regT0);
1309 toObjectStubCall.call(base);
1310 jump().linkTo(isObject, this);
1311
1312 end.link(this);
1313}
1314
1315void JIT::emit_op_next_pname(Instruction* currentInstruction)
1316{
1317 int dst = currentInstruction[1].u.operand;
1318 int base = currentInstruction[2].u.operand;
1319 int i = currentInstruction[3].u.operand;
1320 int size = currentInstruction[4].u.operand;
1321 int it = currentInstruction[5].u.operand;
1322 int target = currentInstruction[6].u.operand;
1323
1324 JumpList callHasProperty;
1325
1326 Label begin(this);
14957cd0
A
1327 load32(intPayloadFor(i), regT0);
1328 Jump end = branch32(Equal, regT0, intPayloadFor(size));
4e4e5a6f
A
1329
1330 // Grab key @ i
14957cd0 1331 loadPtr(payloadFor(it), regT1);
4e4e5a6f
A
1332 loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
1333 load32(BaseIndex(regT2, regT0, TimesEight), regT2);
14957cd0 1334 store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
4e4e5a6f
A
1335 store32(regT2, payloadFor(dst));
1336
1337 // Increment i
14957cd0
A
1338 add32(TrustedImm32(1), regT0);
1339 store32(regT0, intPayloadFor(i));
4e4e5a6f
A
1340
1341 // Verify that i is valid:
14957cd0 1342 loadPtr(payloadFor(base), regT0);
4e4e5a6f
A
1343
1344 // Test base's structure
14957cd0 1345 loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
4e4e5a6f
A
1346 callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));
1347
1348 // Test base's prototype chain
1349 loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
1350 loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
1351 addJump(branchTestPtr(Zero, Address(regT3)), target);
1352
1353 Label checkPrototype(this);
14957cd0
A
1354 callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
1355 loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
1356 loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
4e4e5a6f 1357 callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
14957cd0 1358 addPtr(TrustedImm32(sizeof(Structure*)), regT3);
4e4e5a6f
A
1359 branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
1360
1361 // Continue loop.
1362 addJump(jump(), target);
1363
1364 // Slow case: Ask the object if i is valid.
1365 callHasProperty.link(this);
1366 loadPtr(addressFor(dst), regT1);
1367 JITStubCall stubCall(this, cti_has_property);
1368 stubCall.addArgument(regT0);
1369 stubCall.addArgument(regT1);
1370 stubCall.call();
1371
1372 // Test for valid key.
1373 addJump(branchTest32(NonZero, regT0), target);
1374 jump().linkTo(begin, this);
1375
1376 // End of loop.
1377 end.link(this);
1378}
1379
1380void JIT::emit_op_push_scope(Instruction* currentInstruction)
1381{
1382 JITStubCall stubCall(this, cti_op_push_scope);
1383 stubCall.addArgument(currentInstruction[1].u.operand);
1384 stubCall.call(currentInstruction[1].u.operand);
1385}
1386
1387void JIT::emit_op_pop_scope(Instruction*)
1388{
1389 JITStubCall(this, cti_op_pop_scope).call();
1390}
1391
1392void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
1393{
1394 int dst = currentInstruction[1].u.operand;
1395 int src = currentInstruction[2].u.operand;
1396
1397 emitLoad(src, regT1, regT0);
1398
14957cd0
A
1399 Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
1400 addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::EmptyValueTag)));
4e4e5a6f
A
1401 isInt32.link(this);
1402
1403 if (src != dst)
1404 emitStore(dst, regT1, regT0);
14957cd0 1405 map(m_bytecodeOffset + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
4e4e5a6f
A
1406}
1407
1408void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1409{
1410 int dst = currentInstruction[1].u.operand;
1411
1412 linkSlowCase(iter);
1413
1414 JITStubCall stubCall(this, cti_op_to_jsnumber);
1415 stubCall.addArgument(regT1, regT0);
1416 stubCall.call(dst);
1417}
1418
1419void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
1420{
1421 JITStubCall stubCall(this, cti_op_push_new_scope);
14957cd0 1422 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
4e4e5a6f
A
1423 stubCall.addArgument(currentInstruction[3].u.operand);
1424 stubCall.call(currentInstruction[1].u.operand);
1425}
1426
1427void JIT::emit_op_catch(Instruction* currentInstruction)
1428{
14957cd0
A
1429 // cti_op_throw returns the callFrame for the handler.
1430 move(regT0, callFrameRegister);
4e4e5a6f
A
1431
1432 // Now store the exception returned by cti_op_throw.
14957cd0
A
1433 loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, globalData)), regT3);
1434 load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
1435 load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
1436 store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
1437 store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
1438
1439 unsigned exception = currentInstruction[1].u.operand;
4e4e5a6f 1440 emitStore(exception, regT1, regT0);
14957cd0 1441 map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
4e4e5a6f
A
1442}
1443
1444void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
1445{
1446 JITStubCall stubCall(this, cti_op_jmp_scopes);
1447 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1448 stubCall.call();
1449 addJump(jump(), currentInstruction[2].u.operand);
1450}
1451
1452void JIT::emit_op_switch_imm(Instruction* currentInstruction)
1453{
1454 unsigned tableIndex = currentInstruction[1].u.operand;
1455 unsigned defaultOffset = currentInstruction[2].u.operand;
1456 unsigned scrutinee = currentInstruction[3].u.operand;
1457
1458 // create jump table for switch destinations, track this switch statement.
1459 SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
14957cd0 1460 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
4e4e5a6f
A
1461 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1462
1463 JITStubCall stubCall(this, cti_op_switch_imm);
1464 stubCall.addArgument(scrutinee);
1465 stubCall.addArgument(Imm32(tableIndex));
1466 stubCall.call();
1467 jump(regT0);
1468}
1469
1470void JIT::emit_op_switch_char(Instruction* currentInstruction)
1471{
1472 unsigned tableIndex = currentInstruction[1].u.operand;
1473 unsigned defaultOffset = currentInstruction[2].u.operand;
1474 unsigned scrutinee = currentInstruction[3].u.operand;
1475
1476 // create jump table for switch destinations, track this switch statement.
1477 SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
14957cd0 1478 m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
4e4e5a6f
A
1479 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1480
1481 JITStubCall stubCall(this, cti_op_switch_char);
1482 stubCall.addArgument(scrutinee);
1483 stubCall.addArgument(Imm32(tableIndex));
1484 stubCall.call();
1485 jump(regT0);
1486}
1487
// op_switch_string: switch on an arbitrary string scrutinee.
// Tracks the StringJumpTable in a SwitchRecord for post-codegen patching,
// then lets the stub do the string lookup and jumps to the address it
// returns in regT0.
1488void JIT::emit_op_switch_string(Instruction* currentInstruction)
1489{
1490    unsigned tableIndex = currentInstruction[1].u.operand;
1491    unsigned defaultOffset = currentInstruction[2].u.operand;
1492    unsigned scrutinee = currentInstruction[3].u.operand;
1493
1494    // create jump table for switch destinations, track this switch statement.
1495    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
14957cd0 1496    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));
4e4e5a6f
A
1497
1498    JITStubCall stubCall(this, cti_op_switch_string);
1499    stubCall.addArgument(scrutinee);
1500    stubCall.addArgument(Imm32(tableIndex));
1501    stubCall.call();
    // Stub result (regT0) is the address of the selected case's code.
1502    jump(regT0);
1503}
1504
// op_throw_reference_error: unconditionally throws a ReferenceError whose
// message is the code block constant named by operand 1. The stub does the
// actual throw; no code follows, so no result is stored.
14957cd0 1505void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
4e4e5a6f 1506{
14957cd0 1507    unsigned message = currentInstruction[1].u.operand;
4e4e5a6f 1508
14957cd0 1509    JITStubCall stubCall(this, cti_op_throw_reference_error);
4e4e5a6f 1510    stubCall.addArgument(m_codeBlock->getConstant(message));
14957cd0 1511    stubCall.call();
4e4e5a6f
A
1512}
1513
// op_debug: debugger hook. With DEBUG_WITH_BREAKPOINT enabled we emit a raw
// hardware breakpoint; otherwise we call the debug stub with the opcode's
// three immediate operands (debug-event data forwarded verbatim).
1514void JIT::emit_op_debug(Instruction* currentInstruction)
1515{
1516#if ENABLE(DEBUG_WITH_BREAKPOINT)
1517    UNUSED_PARAM(currentInstruction);
1518    breakpoint();
1519#else
1520    JITStubCall stubCall(this, cti_op_debug);
1521    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1522    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
1523    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
1524    stubCall.call();
1525#endif
1526}
1527
1528
// op_enter: function prologue. Stores jsUndefined into every local variable
// register of the new call frame.
1529void JIT::emit_op_enter(Instruction*)
1530{
1531    // Even though JIT code doesn't use them, we initialize our constant
1532    // registers to zap stale pointers, to avoid unnecessarily prolonging
1533    // object lifetime and increasing GC pressure.
1534    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
1535        emitStore(i, jsUndefined());
1536}
1537
// op_create_activation: lazily create the activation object. If the
// activation register already holds a value (tag != EmptyValueTag, i.e. it
// was created on an earlier path), skip the stub call.
14957cd0 1538void JIT::emit_op_create_activation(Instruction* currentInstruction)
4e4e5a6f 1539{
14957cd0
A
1540    unsigned activation = currentInstruction[1].u.operand;
1541
1542    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
1543    JITStubCall(this, cti_op_push_activation).call(activation);
1544    activationCreated.link(this);
4e4e5a6f
A
1545}
1546
// op_create_arguments: lazily create the arguments object. If dst already
// holds a value, nothing to do. Otherwise call the stub (the no-params
// variant when the only parameter is 'this') and store the result into both
// dst and the unmodified-arguments shadow register.
14957cd0 1547void JIT::emit_op_create_arguments(Instruction* currentInstruction)
4e4e5a6f 1548{
14957cd0
A
1549    unsigned dst = currentInstruction[1].u.operand;
1550
1551    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
4e4e5a6f 1552
4e4e5a6f
A
    // m_numParameters includes 'this', so == 1 means a zero-argument function.
1553    if (m_codeBlock->m_numParameters == 1)
1554        JITStubCall(this, cti_op_create_arguments_no_params).call();
1555    else
1556        JITStubCall(this, cti_op_create_arguments).call();
1557
14957cd0
A
    // Stub left the new arguments object in (regT1, regT0).
1558    emitStore(dst, regT1, regT0);
1559    emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);
1560
4e4e5a6f
A
1561    argsCreated.link(this);
1562}
1563
// op_init_lazy_reg: mark dst as "not yet created" by storing the empty
// JSValue() (EmptyValueTag), which the lazy-creation opcodes above test for.
14957cd0 1564void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
4e4e5a6f 1565{
14957cd0
A
1566    unsigned dst = currentInstruction[1].u.operand;
1567
1568    emitStore(dst, JSValue());
1569}
1570
// op_get_callee: load the Callee cell pointer from the call frame header
// and store it (as a cell) into dst.
1571void JIT::emit_op_get_callee(Instruction* currentInstruction)
1572{
1573    int dst = currentInstruction[1].u.operand;
1574    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
1575    emitStoreCell(dst, regT0);
1576}
1577
// op_create_this: allocate the 'this' object for a [[Construct]] call.
// Loads the prototype value (operand 2) and hands it to the stub, which
// stores the new object into the dst register (operand 1).
1578void JIT::emit_op_create_this(Instruction* currentInstruction)
1579{
1580    unsigned protoRegister = currentInstruction[2].u.operand;
1581    emitLoad(protoRegister, regT1, regT0);
1582    JITStubCall stubCall(this, cti_op_create_this);
1583    stubCall.addArgument(regT1, regT0);
1584    stubCall.call(currentInstruction[1].u.operand);
4e4e5a6f
A
1585}
1586
// op_convert_this (sloppy mode): fast path assumes 'this' is already an
// object needing no conversion. Slow cases are taken when the value is not
// a cell, or when its Structure has the NeedsThisConversion flag set; the
// slow path (emitSlow_op_convert_this) calls the conversion stub.
1587void JIT::emit_op_convert_this(Instruction* currentInstruction)
1588{
1589    unsigned thisRegister = currentInstruction[1].u.operand;
1590
1591    emitLoad(thisRegister, regT1, regT0);
1592
14957cd0
A
    // Slow case 1: tag is not CellTag (primitive 'this').
1593    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
1594
    // Slow case 2: the cell's Structure says it needs 'this' conversion.
1595    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
1596    addSlowCase(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
4e4e5a6f 1597
14957cd0
A
    // Let the register allocator reuse (regT1, regT0) for thisRegister at
    // the next bytecode offset.
1598    map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
1599}
4e4e5a6f 1600
14957cd0
A
// op_convert_this_strict (strict mode): 'this' is passed through unchanged
// except that an empty (uninitialized) value becomes null. Immediates and
// plain objects fall through inline; only a cell whose Structure has
// NeedsThisConversion takes the slow path.
1601void JIT::emit_op_convert_this_strict(Instruction* currentInstruction)
1602{
1603    unsigned thisRegister = currentInstruction[1].u.operand;
1604
1605    emitLoad(thisRegister, regT1, regT0);
1606
    // Empty value => store null as 'this'.
1607    Jump notNull = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
1608    emitStore(thisRegister, jsNull());
1609    Jump setThis = jump();
1610    notNull.link(this);
    // Non-cell immediates pass through untouched.
1611    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
1612    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
1613    Jump notAnObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
    // ObjectType cell that still needs conversion => slow path stub call.
1614    addSlowCase(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
1615    isImmediate.link(this);
1616    notAnObject.link(this);
1617    setThis.link(this);
1618    map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this_strict), thisRegister, regT1, regT0);
4e4e5a6f
A
1619}
1620
// Slow path for op_convert_this: links both slow cases (non-cell 'this',
// NeedsThisConversion structure) and calls the conversion stub, which
// writes the converted value back into thisRegister.
1621void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1622{
1623    unsigned thisRegister = currentInstruction[1].u.operand;
1624
1625    linkSlowCase(iter);
1626    linkSlowCase(iter);
1627
    // (regT1, regT0) still hold the loaded 'this' from the fast path.
1628    JITStubCall stubCall(this, cti_op_convert_this);
1629    stubCall.addArgument(regT1, regT0);
1630    stubCall.call(thisRegister);
1631}
1632
14957cd0
A
// Slow path for op_convert_this_strict: single slow case (cell with
// NeedsThisConversion); the strict-mode stub performs the conversion and
// stores the result into thisRegister.
1633void JIT::emitSlow_op_convert_this_strict(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1634{
1635    unsigned thisRegister = currentInstruction[1].u.operand;
1636
1637    linkSlowCase(iter);
1638
1639    JITStubCall stubCall(this, cti_op_convert_this_strict);
1640    stubCall.addArgument(regT1, regT0);
1641    stubCall.call(thisRegister);
1642}
1643
4e4e5a6f
A
// op_profile_will_call: notify the profiler before a call. Reads the
// enabled-profiler reference off the JIT stack frame; if it is null the
// stub call is skipped entirely.
1644void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
1645{
1646    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1647    Jump noProfiler = branchTestPtr(Zero, Address(regT2));
1648
1649    JITStubCall stubCall(this, cti_op_profile_will_call);
1650    stubCall.addArgument(currentInstruction[1].u.operand);
1651    stubCall.call();
1652    noProfiler.link(this);
1653}
1654
// op_profile_did_call: mirror of op_profile_will_call, run after the call
// returns; skipped when no profiler is installed.
1655void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1656{
1657    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1658    Jump noProfiler = branchTestPtr(Zero, Address(regT2));
1659
1660    JITStubCall stubCall(this, cti_op_profile_did_call);
1661    stubCall.addArgument(currentInstruction[1].u.operand);
1662    stubCall.call();
1663    noProfiler.link(this);
1664}
1665
14957cd0
A
// op_get_arguments_length: fast path for arguments.length when the
// arguments object has NOT been materialized (register tag is still
// EmptyValueTag). Length is call-frame ArgumentCount minus one ('this' is
// included in the count). Otherwise fall to the slow path's generic get.
1666void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
1667{
1668    int dst = currentInstruction[1].u.operand;
1669    int argumentsRegister = currentInstruction[2].u.operand;
    // Slow case: arguments object exists; must go through property lookup.
1670    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
1671    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    // Exclude 'this' from the reported length.
1672    sub32(TrustedImm32(1), regT0);
1673    emitStoreInt32(dst, regT0);
4e4e5a6f
A
1674}
1675
14957cd0
A
// Slow path for op_get_arguments_length: a real arguments object exists,
// so do a generic get_by_id of the identifier operand (the "length"
// identifier index is operand 3) on the arguments object.
1676void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1677{
1678    linkSlowCase(iter);
1679    int dst = currentInstruction[1].u.operand;
1680    int base = currentInstruction[2].u.operand;
1681    int ident = currentInstruction[3].u.operand;
1682
1683    JITStubCall stubCall(this, cti_op_get_by_id_generic);
1684    stubCall.addArgument(base);
1685    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
1686    stubCall.call(dst);
1687}
1688
// op_get_argument_by_val: fast path for arguments[i] when the arguments
// object has not been materialized — read the value straight out of the
// call frame. Slow cases: arguments object exists, index is not an int32,
// or index is out of range (handled in emitSlow_op_get_argument_by_val).
1689void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
1690{
1691    int dst = currentInstruction[1].u.operand;
1692    int argumentsRegister = currentInstruction[2].u.operand;
1693    int property = currentInstruction[3].u.operand;
1694    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
1695    emitLoad(property, regT1, regT2);
1696    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
1697    add32(TrustedImm32(1), regT2);
1698    // regT2 now contains the integer index of the argument we want, including this
1699    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT3);
    // Unsigned compare also rejects indices that were negative before +1.
1700    addSlowCase(branch32(AboveOrEqual, regT2, regT3));
1701
1702    Jump skipOutofLineParams;
1703    int numArgs = m_codeBlock->m_numParameters;
1704    if (numArgs) {
        // In-place arguments live just below the call frame header; the
        // TimesEight scale matches sizeof(Register) on JSVALUE32_64.
1705        Jump notInInPlaceArgs = branch32(AboveOrEqual, regT2, Imm32(numArgs));
1706        addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
1707        loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
1708        loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
1709        skipOutofLineParams = jump();
1710        notInInPlaceArgs.link(this);
1711    }
1712
    // Out-of-line (extra) arguments: rebase by the full ArgumentCount
    // (regT3, scaled to bytes) below the in-place area, then index.
1713    addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
1714    mul32(TrustedImm32(sizeof(Register)), regT3, regT3);
1715    subPtr(regT3, regT1);
1716    loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
1717    loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
1718    if (numArgs)
1719        skipOutofLineParams.link(this);
1720    emitStore(dst, regT1, regT0);
1721}
1722
// Slow path for op_get_argument_by_val. First slow case: the arguments
// object already exists — go straight to a generic get_by_val on it.
// Remaining two slow cases (non-int index, index out of range): create the
// arguments object first, store it into both the arguments register and
// its unmodified shadow, then do the generic get_by_val.
1723void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1724{
1725    unsigned dst = currentInstruction[1].u.operand;
1726    unsigned arguments = currentInstruction[2].u.operand;
1727    unsigned property = currentInstruction[3].u.operand;
1728
1729    linkSlowCase(iter);
1730    Jump skipArgumentsCreation = jump();
1731
1732    linkSlowCase(iter);
1733    linkSlowCase(iter);
    // m_numParameters includes 'this', so == 1 means no declared params.
1734    if (m_codeBlock->m_numParameters == 1)
1735        JITStubCall(this, cti_op_create_arguments_no_params).call();
1736    else
1737        JITStubCall(this, cti_op_create_arguments).call();
1738
    // Stub left the new arguments object in (regT1, regT0).
1739    emitStore(arguments, regT1, regT0);
1740    emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);
1741
1742    skipArgumentsCreation.link(this);
1743    JITStubCall stubCall(this, cti_op_get_by_val);
1744    stubCall.addArgument(arguments);
1745    stubCall.addArgument(property);
1746    stubCall.call(dst);
1747}
1748
1749#if ENABLE(JIT_USE_SOFT_MODULO)
// softModulo: out-of-line integer remainder trampoline for targets without
// a hardware divide (ARM). On entry regT0 and regT2 hold the two int32
// operands (dividend in regT0, divisor in regT2 — presumably, per the
// op_mod caller; confirm against the call site). Computes the remainder on
// absolute values via a shift-and-subtract ladder, restores the sign, and
// returns the result in regT0. regT1/regT3 are saved and restored.
1750void JIT::softModulo()
1751{
1752    push(regT1);
1753    push(regT3);
    // regT3 = divisor, regT2 = dividend (working copies).
1754    move(regT2, regT3);
1755    move(regT0, regT2);
    // regT1 accumulates sign flags: bit 0 = regT3 was negative,
    // bit 1 = regT2 was negative.
1756    move(TrustedImm32(0), regT1);
1757
1758    // Check for negative result remainder
1759    Jump positiveRegT3 = branch32(GreaterThanOrEqual, regT3, TrustedImm32(0));
1760    neg32(regT3);
1761    xor32(TrustedImm32(1), regT1);
1762    positiveRegT3.link(this);
1763
1764    Jump positiveRegT2 = branch32(GreaterThanOrEqual, regT2, TrustedImm32(0));
1765    neg32(regT2);
1766    xor32(TrustedImm32(2), regT1);
1767    positiveRegT2.link(this);
1768
1769    // Save the condition for negative remainder
1770    push(regT1);
1771
    // |dividend| < |divisor| => remainder is the dividend itself.
1772    Jump exitBranch = branch32(LessThan, regT2, regT3);
1773
1774    // Power of two fast case
    // regT0 = divisor - 1; if (divisor - 1) & divisor == 0 the divisor is a
    // power of two and the remainder is just a mask.
1775    move(regT3, regT0);
1776    sub32(TrustedImm32(1), regT0);
1777    Jump powerOfTwo = branchTest32(NonZero, regT0, regT3);
1778    and32(regT0, regT2);
1779    powerOfTwo.link(this);
1780
1781    and32(regT3, regT0);
1782
    // Power-of-two divisor: result already masked into regT2, exit.
1783    Jump exitBranch2 = branchTest32(Zero, regT0);
1784
    // Align the divisor's leading bit with the dividend's for the
    // shift-subtract loop; regT1 = clz(dividend) - clz(divisor).
1785    countLeadingZeros32(regT2, regT0);
1786    countLeadingZeros32(regT3, regT1);
1787    sub32(regT0, regT1);
1788
1789    Jump useFullTable = branch32(Equal, regT1, TrustedImm32(31));
1790
    // Jump into the middle of the unrolled table below, skipping shifts
    // larger than needed: regT1 = 31 - regT1.
1791    neg32(regT1);
1792    add32(TrustedImm32(31), regT1);
1793
1794    int elementSizeByShift = -1;
1795#if CPU(ARM)
1796    elementSizeByShift = 3;
1797#else
1798#error "JIT_OPTIMIZE_MOD not yet supported on this platform."
1799#endif
1800    relativeTableJump(regT1, elementSizeByShift);
1801
1802    useFullTable.link(this);
1803    // Modulo table
    // Unrolled shift-subtract: for each shift i, subtract (divisor << i)
    // from the dividend when it does not underflow (conditional on carry).
1804    for (int i = 31; i > 0; --i) {
1805#if CPU(ARM_TRADITIONAL)
1806        m_assembler.cmp_r(regT2, m_assembler.lsl(regT3, i));
1807        m_assembler.sub_r(regT2, regT2, m_assembler.lsl(regT3, i), ARMAssembler::CS);
1808#elif CPU(ARM_THUMB2)
        // regT1 is free here (sign flags were pushed above).
1809        ShiftTypeAndAmount shift(SRType_LSL, i);
1810        m_assembler.sub_S(regT1, regT2, regT3, shift);
1811        m_assembler.it(ARMv7Assembler::ConditionCS);
1812        m_assembler.mov(regT2, regT1);
1813#else
1814#error "JIT_OPTIMIZE_MOD not yet supported on this platform."
1815#endif
1816    }
1817
    // Final i == 0 step: one unshifted subtraction if still >= divisor.
1818    Jump lower = branch32(Below, regT2, regT3);
1819    sub32(regT3, regT2);
1820    lower.link(this);
1821
1822    exitBranch.link(this);
1823    exitBranch2.link(this);
1824
1825    // Check for negative remainder
    // NOTE(review): the result is negated whenever EITHER operand was
    // negative (regT1 != 0). JS '%' takes the sign of the dividend only —
    // verify the operand/register roles at the call site; if regT1 bit 0
    // tracks the divisor, a positive-dividend/negative-divisor pair would
    // be negated incorrectly here. TODO confirm against op_mod semantics.
1826    pop(regT1);
1827    Jump positiveResult = branch32(Equal, regT1, TrustedImm32(0));
1828    neg32(regT2);
1829    positiveResult.link(this);
1830
    // Return value convention: remainder in regT0.
1831    move(regT2, regT0);
1832
1833    pop(regT3);
1834    pop(regT1);
1835    ret();
1836}
1837#endif // ENABLE(JIT_USE_SOFT_MODULO)
1838
1839} // namespace JSC
1840
1841#endif // USE(JSVALUE32_64)
1842#endif // ENABLE(JIT)