]> git.saurik.com Git - apple/javascriptcore.git/blame - jit/JITOpcodes32_64.cpp
JavaScriptCore-1097.3.tar.gz
[apple/javascriptcore.git] / jit / JITOpcodes32_64.cpp
CommitLineData
4e4e5a6f
A
1/*
2 * Copyright (C) 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 *
14 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
15 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
18 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
19 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
22 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
24 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25 */
26
27#include "config.h"
4e4e5a6f 28
14957cd0
A
29#if ENABLE(JIT)
30#if USE(JSVALUE32_64)
31#include "JIT.h"
4e4e5a6f
A
32
33#include "JITInlineMethods.h"
34#include "JITStubCall.h"
35#include "JSArray.h"
36#include "JSCell.h"
37#include "JSFunction.h"
38#include "JSPropertyNameIterator.h"
39#include "LinkBuffer.h"
40
41namespace JSC {
42
PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure *trampolines)
{
    // Emits, links, and publishes the shared CTI trampolines in one executable
    // allocation: the fast string-length accessor, the four virtual
    // call/construct stubs (lazy-linking and unlinked variants), the native
    // call/construct thunks, and the slow paths taken when the callee is not
    // a JSFunction. Entry labels are recorded into the TrampolineStructure.

    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag

    Jump string_failureCases1 = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info));

    // Checks out okay! - get the length from the string.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);

    // Lengths above INT_MAX cannot be boxed as an Int32 payload; bail to the slow case.
    Jump string_failureCases3 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);

    ret();

    JumpList callSlowCase;
    JumpList constructSlowCase;

    // VirtualCallLink Trampoline
    // regT1, regT0 holds callee; callFrame is moved and partially initialized.
    Label virtualCallLinkBegin = align();
    callSlowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    callSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Also initialize ReturnPC for use by lazy linking and exceptions.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    // The lazy-link stub returns the machine code entry point in regT0.
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT1, regT0 holds callee; callFrame is moved and partially initialized.
    Label virtualConstructLinkBegin = align();
    constructSlowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    constructSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Also initialize ReturnPC for use by lazy linking and exceptions.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualCall Trampoline
    // regT1, regT0 holds callee; regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    callSlowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    callSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // A non-negative m_numParametersForCall means the executable has already
    // been compiled for call; otherwise compile it now via the stub.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    Jump hasCodeBlock1 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCompileCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    // Reload the executable; the compile stub may have clobbered regT2.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    hasCodeBlock1.link(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT1, regT0 holds callee; regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    constructSlowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    constructSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    Jump hasCodeBlock2 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    hasCodeBlock2.link(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    // Slow path: callee is not a JSFunction (e.g. a host callable object).
    callSlowCase.link(this);
    // Finish canonical initialization before JS function call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2, regT2);
    emitPutCellToCallFrameHeader(regT2, RegisterFile::ScopeChain);

    // Also initialize ReturnPC and CodeBlock, like a JS function would.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCallNotJSFunction = call();
    // Pop back to the caller's frame before returning.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    constructSlowCase.link(this);
    // Finish canonical initialization before JS function call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2, regT2);
    emitPutCellToCallFrameHeader(regT2, RegisterFile::ScopeChain);

    // Also initialize ReturnPC and CodeBlock, like a JS function would.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callConstructNotJSFunction = call();
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

    // The string-length failure branches become tail calls into the generic
    // get_by_id slow path.
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, GLOBAL_THUNK_ID);

    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));
    patchBuffer.link(callCallNotJSFunction, FunctionPtr(cti_op_call_NotJSFunction));
    patchBuffer.link(callConstructNotJSFunction, FunctionPtr(cti_op_construct_NotJSConstruct));

    CodeRef finalCode = patchBuffer.finalizeCode();
    RefPtr<ExecutableMemoryHandle> executableMemory = finalCode.executableMemory();

    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);

    return executableMemory.release();
}
223
JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    // Emits the thunk that transitions from JIT code into a host (native)
    // function reached through a JSFunction's NativeExecutable. The host
    // entry point is read out of the executable at m_constructor or
    // m_function depending on isConstruct. Returns the thunk's entry label;
    // code is emitted into the current assembler, not finalized here.
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    // Host frames have no CodeBlock.
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // On x86 the return address is on the stack; record it in the frame header.
    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT1);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT1, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    call(Address(regT2, executableOffsetToFunction), regT0);
    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#else
#error "JIT not supported on this platform."
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif // CPU(X86)

    // Check for an exception: a non-empty tag in globalData->exception means
    // the host function threw.
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    // Record where the exception was raised, then route the return through
    // the VM throw trampoline.
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}
4e4e5a6f 360
JIT::CodeRef JIT::privateCompileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
{
    // Emits and finalizes a dedicated thunk that calls the specific host
    // function 'func' directly (the call site is linked to 'func' below),
    // rather than reading the entry point out of a NativeExecutable as the
    // Label-returning overload does. Returns the finalized code reference.
    Call nativeCall;

    // Host frames have no CodeBlock.
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // On x86 the return address is on the stack; record it in the frame header.
    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    breakpoint();
#endif // CPU(X86)

    // Check for an exception: a non-empty tag in globalData->exception means
    // the host function threw.
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    // Record where the exception was raised, then route the return through
    // the VM throw trampoline.
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return patchBuffer.finalizeCode();
}
502
503void JIT::emit_op_mov(Instruction* currentInstruction)
504{
505 unsigned dst = currentInstruction[1].u.operand;
506 unsigned src = currentInstruction[2].u.operand;
507
508 if (m_codeBlock->isConstantRegisterIndex(src))
509 emitStore(dst, getConstantOperand(src));
510 else {
511 emitLoad(src, regT1, regT0);
512 emitStore(dst, regT1, regT0);
14957cd0 513 map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
4e4e5a6f
A
514 }
515}
516
void JIT::emit_op_end(Instruction* currentInstruction)
{
    // Ends execution of the current code block: loads the final value into
    // the return registers (regT1:regT0) and returns to the caller via the
    // ReturnPC slot of the frame header.
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}
524
525void JIT::emit_op_jmp(Instruction* currentInstruction)
526{
527 unsigned target = currentInstruction[1].u.operand;
528 addJump(jump(), target);
529}
530
6fe7ccc8 531void JIT::emit_op_new_object(Instruction* currentInstruction)
4e4e5a6f 532{
6fe7ccc8
A
533 emitAllocateJSFinalObject(TrustedImmPtr(m_codeBlock->globalObject()->emptyObjectStructure()), regT0, regT1);
534
535 emitStoreCell(currentInstruction[1].u.operand, regT0);
4e4e5a6f
A
536}
537
6fe7ccc8 538void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
4e4e5a6f 539{
6fe7ccc8 540 linkSlowCase(iter);
4e4e5a6f
A
541 JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
542}
543
14957cd0
A
void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    // Verifies that baseVal may appear on the right-hand side of
    // 'instanceof': it must be a cell whose structure carries the
    // ImplementsHasInstance type-info flag. Failures fall to the slow case.
    unsigned baseVal = currentInstruction[1].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}
557
4e4e5a6f
A
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    // Inline fast path for 'value instanceof baseVal': walks value's
    // prototype chain looking for proto. Slow cases cover non-cell operands,
    // a non-object prototype, and a baseVal that overrides HasInstance.
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // Fixme: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}
604
14957cd0
A
605void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
606{
607 unsigned baseVal = currentInstruction[1].u.operand;
608
609 linkSlowCaseIfNotJSCell(iter, baseVal);
610 linkSlowCase(iter);
611
612 JITStubCall stubCall(this, cti_op_check_has_instance);
613 stubCall.addArgument(baseVal);
614 stubCall.call();
615}
616
4e4e5a6f
A
617void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
618{
619 unsigned dst = currentInstruction[1].u.operand;
620 unsigned value = currentInstruction[2].u.operand;
621 unsigned baseVal = currentInstruction[3].u.operand;
622 unsigned proto = currentInstruction[4].u.operand;
623
624 linkSlowCaseIfNotJSCell(iter, value);
4e4e5a6f
A
625 linkSlowCaseIfNotJSCell(iter, proto);
626 linkSlowCase(iter);
14957cd0 627 linkSlowCase(iter);
4e4e5a6f
A
628
629 JITStubCall stubCall(this, cti_op_instanceof);
630 stubCall.addArgument(value);
631 stubCall.addArgument(baseVal);
632 stubCall.addArgument(proto);
633 stubCall.call(dst);
634}
635
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    // Result is true for the undefined tag, and also for cells whose
    // structure carries the MasqueradesAsUndefined type-info flag.
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    
    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    // Non-cell: result is (tag == UndefinedTag).
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();
    
    // Cell: result is the structure's MasqueradesAsUndefined flag.
    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);
    
    done.link(this);
    emitStoreBool(dst, regT0);
}
654
6fe7ccc8 655void JIT::emit_op_is_boolean(Instruction* currentInstruction)
4e4e5a6f 656{
6fe7ccc8
A
657 unsigned dst = currentInstruction[1].u.operand;
658 unsigned value = currentInstruction[2].u.operand;
659
660 emitLoadTag(value, regT0);
661 compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
662 emitStoreBool(dst, regT0);
4e4e5a6f
A
663}
664
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    // NOTE(review): this relies on the JSVALUE32_64 tag layout where Int32Tag
    // is the largest tag value and double values have tags below LowestTag —
    // adding 1 wraps Int32Tag around to 0, so a single unsigned below-compare
    // against LowestTag + 1 accepts exactly int32 and double tags. Confirm
    // against the tag constants in JSValue.h if the layout ever changes.
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    
    emitLoadTag(value, regT0);
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}
675
void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    // Result is true iff the operand is a cell whose structure type is
    // StringType.
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    
    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    
    // Cell: compare the structure's type field against StringType.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();
    
    // Non-cell values are never strings.
    isNotCell.link(this);
    move(TrustedImm32(0), regT0);
    
    done.link(this);
    emitStoreBool(dst, regT0);
}
694
// tear_off_activation activation, arguments: copies the current frame's
// registers into the activation (and arguments object, if materialized) when
// the frame is about to be popped. Skips the stub call entirely when neither
// object was ever created.
void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    // Call the stub if the activation exists, OR if the arguments object
    // exists (checked via the inverted branch below).
    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    Jump argumentsNotCreated = branch32(Equal, tagFor(arguments), TrustedImm32(JSValue::EmptyValueTag));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.addArgument(unmodifiedArgumentsRegister(currentInstruction[2].u.operand));
    stubCall.call();
    argumentsNotCreated.link(this);
}
708
// tear_off_arguments dst: detaches a materialized arguments object from the
// frame before it is popped; a no-op if the object was never created (its
// slot still holds the empty value).
void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(dst)), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst));
    stubCall.call();
    argsNotCreated.link(this);
}
719
// resolve dst, ident: full scope-chain resolution via the stub; the result is
// value-profiled for the optimizing JIT.
void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}
726
// to_primitive dst, src: non-cells and strings are already primitive and pass
// through unchanged; any other cell goes to the slow case (ToPrimitive call).
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    // Record that (regT1, regT0) still hold dst for the next opcode.
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}
742
// Slow path for to_primitive: the value is a non-string cell; call the stub.
// regT1/regT0 still hold the tag/payload from the hot path.
void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}
753
// strcat dst, start, count: concatenates a run of registers into one string
// via the stub (operand 2 = first register index, operand 3 = register count).
void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
761
// resolve_base dst, ident, isStrictPut: resolves the base object for an
// identifier; strict-mode puts use a stub that throws instead of falling back
// to the global object.
void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}
768
// ensure_property_exists base, ident: stub call that throws a reference error
// if the named property is missing (used for strict-mode checks).
void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
776
// resolve_skip dst, ident, skipCount: resolve an identifier, skipping the
// innermost `skipCount` scope-chain nodes. Result is value-profiled.
void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}
784
// resolve_global dst: inline-caches a global property lookup. The cached
// structure in the GlobalResolveInfo is checked; on match the property is
// read straight out of the global object's property storage, otherwise we
// fall to the slow case which (re)fills the cache.
void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = m_codeBlock->globalObject();

    unsigned currentIndex = m_globalResolveInfoIndex++;
    GlobalResolveInfo* resolveInfoAddress = &m_codeBlock->globalResolveInfo(currentIndex);


    // Verify structure.
    move(TrustedImmPtr(globalObject), regT0);
    move(TrustedImmPtr(resolveInfoAddress), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset())));

    // Load property. Each JSValue slot is 8 bytes (payload + tag words).
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT2);
    load32(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT3);
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    // Opcode length differs between the plain and dynamic forms of this op.
    map(m_bytecodeOffset + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
}
811
// Slow path for resolve_global: structure check failed; call the stub, which
// performs the lookup and repopulates the GlobalResolveInfo cache entry.
void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    // Must mirror the index consumed by the hot path for the same bytecode.
    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(TrustedImm32(currentIndex));
    stubCall.callWithValueProfiling(dst);
}
825
826void JIT::emit_op_not(Instruction* currentInstruction)
827{
828 unsigned dst = currentInstruction[1].u.operand;
829 unsigned src = currentInstruction[2].u.operand;
830
831 emitLoadTag(src, regT0);
832
14957cd0
A
833 emitLoad(src, regT1, regT0);
834 addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
835 xor32(TrustedImm32(1), regT0);
4e4e5a6f
A
836
837 emitStoreBool(dst, regT0, (dst == src));
838}
839
// Slow path for op_not: the operand was not a boolean; delegate to the stub,
// which applies full ToBoolean semantics before negating.
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}
851
// jfalse cond, target: jump when cond is falsy. Booleans and int32s are
// handled inline (their payload is zero exactly when falsy); all other tags
// go to the slow case.
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    // Relies on BooleanTag and Int32Tag being the two highest tag values, so
    // a single unsigned comparison admits exactly those two.
    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}
863
// Slow path for jfalse: handles doubles inline when FP is available (jump if
// the value is zero or NaN), otherwise calls cti_op_jtrue and inverts.
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}
887
// jtrue cond, target: jump when cond is truthy. Mirror image of jfalse:
// booleans/int32s tested inline via their payload, everything else slow-cased.
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    // Relies on BooleanTag and Int32Tag being the two highest tag values.
    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}
899
// Slow path for jtrue: doubles handled inline when FP is available (jump if
// non-zero and not NaN); all other tags fall through to cti_op_jtrue.
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}
923
// jeq_null src, target: jump when src == null (loose equality), i.e. src is
// null, undefined, or an object that masquerades as undefined.
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    // UndefinedTag and NullTag are adjacent and differ only in the low bit,
    // so OR-ing in 1 folds both into NullTag for a single comparison.
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}
948
// jneq_null src, target: jump when src != null (loose); inverse of jeq_null.
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    // Fold UndefinedTag/NullTag into one comparison via the low bit.
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}
973
// jneq_ptr src, cell, target: jump unless src is exactly the given cell
// (pointer identity). Used e.g. to guard cached function comparisons.
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    // Not a cell at all, or a different cell pointer: take the jump.
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(ptr)), target);
}
984
4e4e5a6f
A
// eq dst, src1, src2: loose equality fast path. Handles same-tag,
// non-cell, non-double operands by comparing payloads; mixed tags, cells
// (strings/objects) and doubles all go to the slow case.
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
1000
// Slow path for eq: two-string comparisons use the dedicated string-equality
// stub; everything else (mixed tags, doubles, objects) uses the generic
// cti_op_eq stub.
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    emitStoreBool(dst, regT0);
}
1034
// neq dst, src1, src2: loose inequality fast path; identical structure to
// emit_op_eq but stores the inverted payload comparison.
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
1050
// Slow path for neq: same shape as emitSlow_op_eq, but the equality result is
// XOR-ed with 1 before storing so that both the string and generic paths are
// inverted in one place.
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    // Invert the equality result to get inequality.
    xor32(TrustedImm32(0x1), regT0);
    emitStoreBool(dst, regT0);
}
1083
// Shared emitter for stricteq/nstricteq. Fast path handles same-tag,
// non-double operands where at most one side is a string: strict equality is
// then just payload identity. Differing tags, doubles, and string-string
// pairs go to the slow case.
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings.
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstNotString = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info));
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    notCell.link(this);
    firstNotString.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
1111
// stricteq dst, src1, src2: delegates to the shared strict-equality emitter.
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}
1116
// Slow path for stricteq: links the three slow cases registered by
// compileOpStrictEq (tag mismatch, doubles, both-strings) and calls the stub.
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}
1132
// nstricteq dst, src1, src2: strict inequality via the shared emitter.
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
1137
// Slow path for nstricteq: mirrors emitSlow_op_stricteq with the negated stub.
void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}
1153
// eq_null dst, src: dst = (src == null) loosely — true for null, undefined,
// and cells that masquerade as undefined. Fully inline; no slow case.
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // Cell: result is the MasqueradesAsUndefined bit.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate: true when the tag is Null or Undefined.
    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    emitStoreBool(dst, regT1);
}
1177
// neq_null dst, src: dst = (src != null) loosely; exact inverse of eq_null,
// using negated comparisons AND-ed together instead of OR.
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // Cell: true unless the structure masquerades as undefined.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(Zero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate: true when the tag is neither Null nor Undefined.
    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    emitStoreBool(dst, regT1);
}
1201
// resolve_with_base baseDst, propDst, ident: resolves an identifier and also
// returns its base object (for `base.prop(...)` style calls). Stub call.
void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}
1209
// resolve_with_this thisDst, propDst, ident: like resolve_with_base but
// returns the `this` value to use for the call. Stub call.
void JIT::emit_op_resolve_with_this(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_this);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}
1217
// throw exception: transfers control to the nearest handler via the stub.
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // this point in the code should never be reached.
    breakpoint();
#endif
}
1231
// get_pnames dst, base, i, size, breakTarget: sets up a for-in iteration.
// Produces a JSPropertyNameIterator in dst, zeroes the index register i and
// stores the name count in size; jumps to breakTarget when base is
// null/undefined (nothing to enumerate). Non-object bases are coerced via
// cti_to_object and retried.
void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    // `this` in sloppy mode is already an object, so the type check can be
    // skipped for it.
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    // i = 0, size = number of enumerable names (both stored as raw int32s).
    store32(TrustedImm32(Int32Tag), intTagFor(i));
    store32(TrustedImm32(0), intPayloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, payloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    // null/undefined enumerate nothing: skip the whole loop.
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}
1273
// next_pname dst, base, i, size, it, target: fetches the next enumerable
// property name for a for-in loop. Stores the name in dst and jumps to
// target while names remain; falls through when iteration is done. Names
// cached before a structure change are re-validated (inline structure +
// prototype-chain walk, then cti_has_property as the last resort).
void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(payloadFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    loadPtr(payloadFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    // A null prototype tag ends the chain walk; fall back to the stub.
    callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
    loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}
1338
// push_scope scope: pushes a value (converted to object) onto the scope
// chain, e.g. for `with`. Stub call; result written back to the same operand.
void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}
1345
// pop_scope: pops the innermost scope-chain node. Pure stub call.
void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}
1350
// to_jsnumber dst, src: ToNumber conversion. Int32s and doubles pass through
// inline (double tags sit below EmptyValueTag); all other tags slow-case.
void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    // Tags >= EmptyValueTag (unsigned) are non-numeric; doubles live below.
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}
1366
// Slow path for to_jsnumber: the value was not numeric; full ToNumber via stub.
void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}
1377
// push_new_scope dst, ident, value: pushes a new scope node binding ident to
// value (used for catch-style bindings). Stub call.
void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}
1385
// catch exception: landing pad for an exception handler. Restores the call
// frame handed back by cti_op_throw, moves the pending exception from the
// global data into the handler's register, and clears the pending slot.
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // cti_op_throw returns the callFrame for the handler.
    move(regT0, callFrameRegister);

    // Now store the exception returned by cti_op_throw.
    loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, globalData)), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    // Reset globalData->exception to the empty value so it is not re-thrown.
    store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}
1402
// jmp_scopes count, target: pops `count` scope-chain nodes then jumps to
// target (e.g. jumping out of nested `with` blocks).
void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}
1410
// switch_imm tableIndex, defaultOffset, scrutinee: immediate (int32) switch.
// Registers the jump table for later linking, then calls the stub, which
// returns the machine-code target address to jump to.
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
1428
// switch_char tableIndex, defaultOffset, scrutinee: single-character string
// switch; same stub-returns-target scheme as switch_imm.
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
1446
// switch_string tableIndex, defaultOffset, scrutinee: string-keyed switch
// backed by a StringJumpTable; same stub-returns-target scheme as above.
void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
1463
// throw_reference_error message: throws a ReferenceError with the given
// constant-pool message string.
void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
    unsigned message = currentInstruction[1].u.operand;

    JITStubCall stubCall(this, cti_op_throw_reference_error);
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.call();
}
1472
// op_debug: notify the debugger machinery of a debug event.
void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    // Breakpoint builds trap directly in the generated code instead of
    // calling out to the runtime.
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    // Forward the three debug-hook operands to the runtime unchanged.
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}
1486
1487
1488void JIT::emit_op_enter(Instruction*)
1489{
1490 // Even though JIT code doesn't use them, we initialize our constant
1491 // registers to zap stale pointers, to avoid unnecessarily prolonging
1492 // object lifetime and increasing GC pressure.
1493 for (int i = 0; i < m_codeBlock->m_numVars; ++i)
1494 emitStore(i, jsUndefined());
1495}
1496
14957cd0 1497void JIT::emit_op_create_activation(Instruction* currentInstruction)
4e4e5a6f 1498{
14957cd0
A
1499 unsigned activation = currentInstruction[1].u.operand;
1500
1501 Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
1502 JITStubCall(this, cti_op_push_activation).call(activation);
1503 activationCreated.link(this);
4e4e5a6f
A
1504}
1505
// op_create_arguments: lazily materialize the 'arguments' object for this
// frame. The register starts out "empty"; once created, the object is cached
// in both the named register and its unmodified twin register.
void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    // Fast path: a non-empty tag means the arguments object already exists.
    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));

    // Call the runtime to create it; the stored result is taken from
    // regT1:regT0 (tag:payload), the stub-call return convention here.
    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(dst, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);

    argsCreated.link(this);
}
1518
14957cd0 1519void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
4e4e5a6f 1520{
14957cd0
A
1521 unsigned dst = currentInstruction[1].u.operand;
1522
1523 emitStore(dst, JSValue());
1524}
1525
1526void JIT::emit_op_get_callee(Instruction* currentInstruction)
1527{
1528 int dst = currentInstruction[1].u.operand;
1529 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
1530 emitStoreCell(dst, regT0);
1531}
1532
// op_create_this fast path: allocate the new |this| object inline. The
// prototype (operand 2) must be a cell, must be an object, and must carry a
// cached inheritor ID; any other case bails out. The slow-case registration
// order here must match the linkSlowCase order in emitSlow_op_create_this.
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    // Load the prototype: tag in regT1, payload in regT0.
    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(currentInstruction[2].u.operand, regT1); // slow case 1: not a cell
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    addSlowCase(emitJumpIfNotObject(regT1)); // slow case 2: not an object

    // now we know that the prototype is an object, but we don't know if it's got an
    // inheritor ID

    loadPtr(Address(regT0, JSObject::offsetOfInheritorID()), regT2);
    addSlowCase(branchTestPtr(Zero, regT2)); // slow case 3: no cached inheritor ID

    // now regT2 contains the inheritorID, which is the structure that the newly
    // allocated object will have.

    // Inline-allocate the object (slow case 4 registered inside on
    // allocation failure) and store the resulting cell.
    emitAllocateJSFinalObject(regT2, regT0, regT1);

    emitStoreCell(currentInstruction[1].u.operand, regT0);
}
1553
// op_create_this slow path. The four link calls below must match, in order,
// the four slow cases registered by emit_op_create_this; all of them fall
// back to the generic runtime object construction.
void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand); // not a cell
    linkSlowCase(iter); // not an object
    linkSlowCase(iter); // doesn't have an inheritor ID
    linkSlowCase(iter); // allocation failed

    // Reload the prototype and let the stub create |this|, storing the
    // result into the destination register.
    unsigned protoRegister = currentInstruction[2].u.operand;
    emitLoad(protoRegister, regT1, regT0);
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
1566
// op_convert_this fast path: |this| needs no conversion when it is a cell
// that is not a JSString. Non-cells and strings take the slow case.
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    // Load |this|: tag in regT1, payload in regT0.
    emitLoad(thisRegister, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag))); // slow case 1: not a cell
    addSlowCase(branchPtr(Equal, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info))); // slow case 2: a string

    // Record that thisRegister is live in regT1:regT0 for the next opcode.
    map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}
4e4e5a6f 1578
4e4e5a6f
A
// op_convert_this slow path. An undefined |this| is converted directly to
// the cached global "this" object without a stub call; every other slow
// input (other non-cells, and strings) goes through cti_op_convert_this.
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalScopeChain()->globalThis.get();

    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter); // not a cell

    // Undefined short-circuits: store the global this object (a cell) and
    // jump back to the instruction after op_convert_this.
    Jump isNotUndefined = branch32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag));
    move(TrustedImmPtr(globalThis), regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitStore(thisRegister, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    isNotUndefined.link(this);
    linkSlowCase(iter); // the string slow case joins here
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}
1597
4e4e5a6f
A
1598void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
1599{
1600 peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1601 Jump noProfiler = branchTestPtr(Zero, Address(regT2));
1602
1603 JITStubCall stubCall(this, cti_op_profile_will_call);
1604 stubCall.addArgument(currentInstruction[1].u.operand);
1605 stubCall.call();
1606 noProfiler.link(this);
1607}
1608
1609void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1610{
1611 peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1612 Jump noProfiler = branchTestPtr(Zero, Address(regT2));
1613
1614 JITStubCall stubCall(this, cti_op_profile_did_call);
1615 stubCall.addArgument(currentInstruction[1].u.operand);
1616 stubCall.call();
1617 noProfiler.link(this);
1618}
1619
14957cd0
A
// op_get_arguments_length fast path: while no arguments object has been
// materialized (register tag still EmptyValueTag), the length is simply the
// frame's argument count minus one (excluding the implicit |this|). If an
// arguments object exists, take the slow case and read its property instead.
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    // Slow case: an arguments object already exists for this frame.
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    load32(payloadFor(RegisterFile::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0); // drop the implicit |this| from the count
    emitStoreInt32(dst, regT0);
}
1629
14957cd0
A
1630void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1631{
1632 linkSlowCase(iter);
1633 int dst = currentInstruction[1].u.operand;
1634 int base = currentInstruction[2].u.operand;
1635 int ident = currentInstruction[3].u.operand;
1636
1637 JITStubCall stubCall(this, cti_op_get_by_id_generic);
1638 stubCall.addArgument(base);
1639 stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
1640 stubCall.call(dst);
1641}
1642
// op_get_argument_by_val fast path: read arguments[i] directly off the call
// frame. Valid only while no arguments object has been materialized. Bails
// to the slow path if (1) the arguments object exists, (2) the index is not
// an int32, or (3) the index is out of range — order must match
// emitSlow_op_get_argument_by_val.
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    // Load the index: tag in regT1, payload in regT2.
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including this
    load32(payloadFor(RegisterFile::ArgumentCount), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

    // Arguments live at negative offsets from the frame: negate the index,
    // scale by sizeof(Register) (TimesEight: tag + payload), and bias by the
    // |this| argument's offset to address the slot's payload and tag words.
    neg32(regT2);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT1);
    emitStore(dst, regT1, regT0);
}
1661
// op_get_argument_by_val slow path. Slow-case order matches the fast path:
// (1) an arguments object already exists -> use it directly via get_by_val;
// (2) index not an int32 / (3) index out of range -> materialize the
// arguments object first, then do a generic get_by_val on it.
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter); // arguments object already exists: skip creation
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter); // index is not an int32
    linkSlowCase(iter); // index out of range
    // Create the arguments object (result taken from regT1:regT0) and cache
    // it in both the named register and its unmodified twin.
    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(arguments, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments);
    stubCall.addArgument(property);
    stubCall.call(dst);
}
1683
14957cd0
A
1684} // namespace JSC
1685
1686#endif // USE(JSVALUE32_64)
1687#endif // ENABLE(JIT)