/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

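// In the JSVALUE32_64 value representation a JSValue is a 64-bit tag/payload
// pair held in two 32-bit registers. The convention throughout this file is
// that the payload travels in the low register of a pair (typically regT0)
// and the tag in the high register (typically regT1); cells are identified
// by JSValue::CellTag, and doubles by tags below JSValue::LowestTag.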
PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure* trampolines)
{
    // This trampoline provides fast property access for string length.
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag.

    Jump string_failureCases1 = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info));

    // Checks out okay! Get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);

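    // A length above INT_MAX cannot be represented as an int32 payload, so
    // bail to the slow case rather than return a negative number.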
    Jump string_failureCases3 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);

    ret();

    JumpList callSlowCase;
    JumpList constructSlowCase;

    // VirtualCallLink Trampoline
    // regT1, regT0 hold the callee; callFrame is moved and partially initialized.
    Label virtualCallLinkBegin = align();
    callSlowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    callSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Also initialize ReturnPC for use by lazy linking and exceptions.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT1, regT0 hold the callee; callFrame is moved and partially initialized.
    Label virtualConstructLinkBegin = align();
    constructSlowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    constructSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Also initialize ReturnPC for use by lazy linking and exceptions.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualCall Trampoline
    // regT1, regT0 hold the callee; regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    callSlowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    callSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

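    // A negative m_numParametersForCall appears to act as the "no code yet"
    // sentinel here: if the count is >= 0 the executable already has JIT
    // code, otherwise we call out to cti_op_call_jitCompile below and reload.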
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    Jump hasCodeBlock1 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCompileCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    hasCodeBlock1.link(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT1, regT0 hold the callee; regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    constructSlowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    constructSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    Jump hasCodeBlock2 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    hasCodeBlock2.link(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    callSlowCase.link(this);
    // Finish canonical initialization before JS function call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2, regT2);
    emitPutCellToCallFrameHeader(regT2, RegisterFile::ScopeChain);

    // Also initialize ReturnPC and CodeBlock, like a JS function would.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCallNotJSFunction = call();
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    constructSlowCase.link(this);
    // Finish canonical initialization before JS function call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2, regT2);
    emitPutCellToCallFrameHeader(regT2, RegisterFile::ScopeChain);

    // Also initialize ReturnPC and CodeBlock, like a JS function would.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callConstructNotJSFunction = call();
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

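    // makeTailRecursiveCall turns each string-length failure branch into what
    // is in effect a tail call, so cti_op_get_by_id_string_fail returns
    // directly to the trampoline's caller rather than back into this code.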
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);

    // All trampolines constructed! Copy the code, link up the calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, GLOBAL_THUNK_ID);

    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));
    patchBuffer.link(callCallNotJSFunction, FunctionPtr(cti_op_call_NotJSFunction));
    patchBuffer.link(callConstructNotJSFunction, FunctionPtr(cti_op_construct_NotJSConstruct));

    CodeRef finalCode = patchBuffer.finalizeCode();
    RefPtr<ExecutableMemoryHandle> executableMemory = finalCode.executableMemory();

    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);

    return executableMemory.release();
}

JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

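    // On entry the caller's return address has left the stack sizeof(void*)
    // past a 16-byte boundary; reserving 16 - sizeof(void*) bytes presumably
    // restores the 16-byte alignment the host function call below expects.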
    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    // Call the function.
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT1);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT1, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    // Call the function.
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    call(Address(regT2, executableOffsetToFunction), regT0);
    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate 16 bytes (8-byte aligned) of stack space: 16 (unused) bytes
    // for the 4 argument slots the MIPS o32 ABI requires the caller to reserve.
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Set up arg0.
    move(callFrameRegister, MIPSRegisters::a0);

    // Call the function.
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space.
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#else
#error "JIT not supported on this platform."
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif // CPU(X86)

    // Check for an exception.
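    // globalData->exception holds an empty JSValue (EmptyValueTag) when no
    // exception is pending, so only its tag word needs to be inspected here.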
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception.
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}

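// Unlike the generic thunk above, this overload builds a stub specialized to
// one known NativeFunction: the target is linked directly into the generated
// call rather than loaded from the NativeExecutable at run time.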
JIT::CodeRef JIT::privateCompileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
{
    Call nativeCall;

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // Call the function.
    nativeCall = call();

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // Call the function.
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate 16 bytes (8-byte aligned) of stack space: 16 (unused) bytes
    // for the 4 argument slots the MIPS o32 ABI requires the caller to reserve.
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Set up arg0.
    move(callFrameRegister, MIPSRegisters::a0);

    // Load the callee.
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // Call the function.
    nativeCall = call();

    // Restore stack space.
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // Call the function.
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    breakpoint();
#endif // CPU(X86)

    // Check for an exception.
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception.
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    // Stub constructed! Copy the code, link up the call, and return the code reference.
    LinkBuffer patchBuffer(*m_globalData, this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return patchBuffer.finalizeCode();
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
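        // Note in the register map that regT1:regT0 still hold dst, so a
        // following opcode may reuse them without reloading from memory.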
        map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    emitAllocateJSFinalObject(TrustedImmPtr(m_codeBlock->globalObject()->emptyObjectStructure()), regT0, regT1);

    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell; this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that proto is an object.
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // FIXME: This check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal);
    stubCall.call();
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();

    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
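    // Number tags are Int32Tag (0xffffffff) and anything below
    // JSValue::LowestTag (doubles). Adding 1 wraps Int32Tag to zero, so one
    // unsigned Below comparison against LowestTag + 1 covers both cases.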
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    Jump argumentsNotCreated = branch32(Equal, tagFor(arguments), TrustedImm32(JSValue::EmptyValueTag));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.addArgument(unmodifiedArgumentsRegister(currentInstruction[2].u.operand));
    stubCall.call();
    argumentsNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(dst)), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst));
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = m_codeBlock->globalObject();

    unsigned currentIndex = m_globalResolveInfoIndex++;
    GlobalResolveInfo* resolveInfoAddress = &m_codeBlock->globalResolveInfo(currentIndex);

    // Verify structure.
    move(TrustedImmPtr(globalObject), regT0);
    move(TrustedImmPtr(resolveInfoAddress), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset())));

    // Load property.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT2);
    load32(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT3);
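    // The property storage is an array of 8-byte JSValues, hence the
    // TimesEight scaling and the separate 4-byte payload and tag loads.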
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(TrustedImm32(currentIndex));
    stubCall.callWithValueProfiling(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

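    // Boolean and Int32 are the two topmost tags (see the ASSERT below), so
    // one unsigned Below test against BooleanTag routes every other type to
    // the slow case; for both remaining types a zero payload means false.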
    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);

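    // UndefinedTag and NullTag differ only in the low bit (see the ASSERT
    // below), so or-ing in 1 folds undefined into null and a single
    // comparison handles both immediates.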
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(ptr)), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings: two distinct JSString cells can
    // represent the same string value, so comparing payloads is only sound
    // when at most one operand is a string.
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstNotString = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info));
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    notCell.link(this);
    firstNotString.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(Zero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}

void JIT::emit_op_resolve_with_this(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_this);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(TrustedImm32(Int32Tag), intTagFor(i));
    store32(TrustedImm32(0), intPayloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, payloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(payloadFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    loadPtr(payloadFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
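    // The cached StructureChain vector appears to be null-terminated;
    // reaching the terminator below without a structure mismatch means the
    // chain is unchanged and the cached property name is still valid.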
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
    loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

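    // Already-numeric values take the fast path: int32 is matched directly,
    // and any tag at or above EmptyValueTag (i.e. anything that is not a
    // double) is routed to the slow case.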
    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // cti_op_throw returns the callFrame for the handler.
    move(regT0, callFrameRegister);

    // Now store the exception returned by cti_op_throw.
    loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, globalData)), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
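    // Clear globalData->exception by writing back the empty JSValue, so the
    // exception is not observed as still pending.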
    store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}

1403 void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
1404 {
1405 JITStubCall stubCall(this, cti_op_jmp_scopes);
1406 stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
1407 stubCall.call();
1408 addJump(jump(), currentInstruction[2].u.operand);
1409 }
1410
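// The three switch emitters below share one shape: record the jump table so
// that link time can fill ctiOffsets with real code locations, then call a
// stub that selects the destination at runtime and returns a machine-code
// pointer in regT0 for an indirect jump.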
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
    unsigned message = currentInstruction[1].u.operand;

    JITStubCall stubCall(this, cti_op_throw_reference_error);
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our local variable
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

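// Activations and arguments objects are created lazily: their registers start
// out holding the empty value, so a tag other than EmptyValueTag means the
// object already exists and the stub call can be skipped.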
void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;

    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall(this, cti_op_push_activation).call(activation);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));

    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(dst, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);

    argsCreated.link(this);
}

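// op_init_lazy_reg primes a register with the empty JSValue, the sentinel
// that the lazy-creation checks above test for.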
void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitStore(dst, JSValue());
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
    emitStoreCell(dst, regT0);
}

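// op_create_this allocates the this object for a constructor call. The fast
// path requires the prototype (operand 2) to be an object with a cached
// inheritor ID, the Structure shared by objects that inherit from it;
// otherwise we bail to cti_op_create_this.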
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(currentInstruction[2].u.operand, regT1);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    addSlowCase(emitJumpIfNotObject(regT1));

    // Now we know that the prototype is an object, but we don't know
    // whether it has an inheritor ID.
    loadPtr(Address(regT0, JSObject::offsetOfInheritorID()), regT2);
    addSlowCase(branchTestPtr(Zero, regT2));

    // Now regT2 contains the inheritor ID: the structure that the newly
    // allocated object will have.
    emitAllocateJSFinalObject(regT2, regT0, regT1);

    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand); // not a cell
    linkSlowCase(iter); // not an object
    linkSlowCase(iter); // doesn't have an inheritor ID
    linkSlowCase(iter); // allocation failed
    unsigned protoRegister = currentInstruction[2].u.operand;
    emitLoad(protoRegister, regT1, regT0);
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branchPtr(Equal, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));

    map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}

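// Slow path for op_convert_this: an undefined this value is rewritten
// directly to the cached global this object; any other failing case
// (null, strings, and so on) is handled by cti_op_convert_this.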
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalScopeChain()->globalThis.get();
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    Jump isNotUndefined = branch32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag));
    move(TrustedImmPtr(globalThis), regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitStore(thisRegister, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    isNotUndefined.link(this);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

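// arguments.length can be read straight off the call frame as long as no
// arguments object has been materialized. RegisterFile::ArgumentCount counts
// the this value too, hence the subtraction of one.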
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    load32(payloadFor(RegisterFile::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    emitStoreInt32(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.call(dst);
}

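// Likewise, arguments[i] can be loaded directly while no arguments object
// exists: arguments live at negative offsets from the frame pointer, so the
// index is bumped past the this slot, negated, and scaled by sizeof(Register)
// (eight bytes even on 32-bit, since a Register holds a full JSValue).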
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including this.
    load32(payloadFor(RegisterFile::ArgumentCount), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

    neg32(regT2);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT1);
    emitStore(dst, regT1, regT0);
}

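// The slow cases link in the order they were added above: first the case
// where an arguments object already exists (skip creation and defer to the
// op_get_by_val stub), then the non-int32-index and out-of-range cases, which
// must materialize the arguments object before calling out.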
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(arguments, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments);
    stubCall.addArgument(property);
    stubCall.call(dst);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)