/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure* trampolines)
{
#if ENABLE(JIT_USE_SOFT_MODULO)
    Label softModBegin = align();
    softModulo();
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag

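    // A quick sketch of the JSVALUE32_64 encoding this file leans on (see
    // JSValue.h for the authoritative definitions): every value is a 64-bit
    // tag/payload pair held in two 32-bit registers. For example:
    //
    //     tag == JSValue::Int32Tag   -> payload is an int32
    //     tag == JSValue::CellTag    -> payload is a JSCell*
    //     tag == JSValue::BooleanTag -> payload is 0 or 1
    //     tag <  JSValue::LowestTag  -> tag and payload together are the
    //                                   raw bits of a double
    //
    // So the CellTag comparison below is a complete "is this a cell?" test,
    // and the vptr comparison then narrows the cell down to a JSString.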
    Jump string_failureCases1 = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! Get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);

    Jump string_failureCases3 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);

    ret();
#endif

    JumpList callLinkFailures;
    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
#if ENABLE(JIT_OPTIMIZE_CALL)
    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualConstructLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

#endif // ENABLE(JIT_OPTIMIZE_CALL)

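    // In the VirtualCall / VirtualConstruct trampolines below, a negative
    // m_numParametersForCall / m_numParametersForConstruct acts as the
    // "not yet compiled" sentinel: once the FunctionExecutable has JIT code
    // the count is non-negative and the branch skips straight to the jump;
    // otherwise we drop into the cti compile stub first. (A note on the
    // convention as used here; the sentinel itself is defined alongside
    // FunctionExecutable.)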
    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock4.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    // If the parser fails we want to be able to keep going,
    // so we handle this as a parse failure.
    callLinkFailures.link(this);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT1);
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, m_globalData->executableAllocator);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
#endif
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
#if ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
#endif
#if ENABLE(JIT_USE_SOFT_MODULO)
    trampolines->ctiSoftModulo = patchBuffer.trampolineAt(softModBegin);
#endif
}

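// Both privateCompileCTINativeCall variants below emit the glue for calling
// a C/C++ host function with the callee call frame as its only argument.
// Roughly, the host side looks like this (a sketch; see the NativeFunction
// typedef for the exact signature):
//
//     EncodedJSValue JSC_HOST_CALL someHostFunction(ExecState* exec)
//     {
//         // exec is the callFrameRegister we pass below.
//         return JSValue::encode(jsUndefined());
//     }
//
// The per-architecture blocks differ only in how that one argument is
// passed and how the return address is preserved around the call.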
JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT1);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT1, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    call(Address(regT2, executableOffsetToFunction), regT0);
    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}

JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool> executablePool, JSGlobalData* globalData, NativeFunction func)
{
    Call nativeCall;
    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);
#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, executablePool);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    patchBuffer.finalizeCode();

    return patchBuffer.trampolineAt(nativeCallThunk);
}

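// op_mov copies one virtual register to another. Constants are stored
// directly; everything else round-trips through the tag/payload register
// pair. The map() call records that regT1/regT0 now mirror dst, letting a
// following opcode skip a redundant load (a note on intent; the mapping
// machinery lives with the JIT's emitLoad/map helpers).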
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(branch8(NotEqual, Address(regT3, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));

    // FIXME: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal);
    stubCall.call();
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;

    loadPtr(&globalObject->m_registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);

    loadPtr(&globalObject->m_registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

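// op_get_scoped_var/op_put_scoped_var address a variable 'skip' links up
// the scope chain. When a function needs a full scope chain but may not
// have created its activation yet (its register still holds EmptyValueTag),
// the first link is only skipped once the activation actually exists -
// hence the extra branch ahead of the plain skip loop.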
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    Jump argumentsNotCreated = branch32(Equal, tagFor(arguments), TrustedImm32(JSValue::EmptyValueTag));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.addArgument(unmodifiedArgumentsRegister(currentInstruction[2].u.operand));
    stubCall.call();
    argumentsNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(dst)), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst));
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

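// op_resolve_global fast path: each resolve site owns a GlobalResolveInfo
// caching the global object's Structure and the resolved property offset.
// If the structure still matches, the value is read straight out of the
// property storage; the TimesEight scaling reflects the eight-byte
// (payload + tag) JSValue slots of the 32_64 representation.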
void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = m_codeBlock->globalObject();

    unsigned currentIndex = m_globalResolveInfoIndex++;
    GlobalResolveInfo* resolveInfoAddress = &m_codeBlock->globalResolveInfo(currentIndex);

    // Verify structure.
    move(TrustedImmPtr(globalObject), regT0);
    move(TrustedImmPtr(resolveInfoAddress), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset())));

    // Load property.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT2);
    load32(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT3);
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

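// op_jfalse/op_jtrue fast path: Int32Tag and BooleanTag are the two highest
// tag values (the ASSERT below pins this down), so a single unsigned
// 'Below BooleanTag' comparison rejects every other type, after which one
// payload test handles booleans and int32s alike.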
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, Imm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, Imm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(ptr)), target);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), TrustedImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), regT0);
    emitStoreBool(dst, regT0);
}

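// compileOpStrictEq works from the tags alone. ANDing the two tags lands
// below LowestTag if either operand is a double, and at or above CellTag
// when the operands may still need a real value comparison (cells, int32s);
// both ranges bail to the slow path. Whatever survives can be answered
// just by comparing the tags for (in)equality.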
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, TrustedImm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, TrustedImm32(JSValue::CellTag)));

    if (type == OpStrictEq)
        compare32(Equal, regT0, regT1, regT0);
    else
        compare32(NotEqual, regT0, regT1, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

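// op_eq_null/op_neq_null: under == both undefined and null compare equal to
// null. Cells are tested via the MasqueradesAsUndefined type-info bit (set
// by objects that must pretend to be undefined, e.g. document.all-style
// hosts); immediates are handled by comparing the tag against NullTag and
// UndefinedTag and combining the two results.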
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(Zero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(TrustedImm32(JSValue::Int32Tag), intTagFor(i));
    store32(TrustedImm32(0), intPayloadFor(i));
    store32(TrustedImm32(JSValue::Int32Tag), intTagFor(size));
    store32(regT3, payloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

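// op_next_pname iterates the name snapshot captured by op_get_pnames.
// Before yielding names[i] it revalidates the cache: the base object must
// still have the Structure the iterator recorded, and every Structure in
// the cached prototype chain must still match; otherwise it falls back to
// cti_has_property to ask whether the key is still present.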
void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(payloadFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    loadPtr(payloadFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
    loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

1427 void JIT::emit_op_catch(Instruction* currentInstruction)
1428 {
1429 // cti_op_throw returns the callFrame for the handler.
1430 move(regT0, callFrameRegister);
1431
1432 // Now store the exception returned by cti_op_throw.
1433 loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, globalData)), regT3);
1434 load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
1435 load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
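// Clear globalData->exception by writing the empty JSValue's payload and
// tag back, so the handler starts with no pending exception.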
store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

unsigned exception = currentInstruction[1].u.operand;
emitStore(exception, regT1, regT0);
map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
JITStubCall stubCall(this, cti_op_jmp_scopes);
stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
stubCall.call();
addJump(jump(), currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
unsigned tableIndex = currentInstruction[1].u.operand;
unsigned defaultOffset = currentInstruction[2].u.operand;
unsigned scrutinee = currentInstruction[3].u.operand;

// Create a jump table for the switch destinations, and track this switch statement.
SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

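// cti_op_switch_imm looks the scrutinee up in the jump table and returns in
// regT0 the native code address to branch to (the default destination on a
// miss); ctiOffsets is filled in with those addresses when the code is linked.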
JITStubCall stubCall(this, cti_op_switch_imm);
stubCall.addArgument(scrutinee);
stubCall.addArgument(Imm32(tableIndex));
stubCall.call();
jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
unsigned tableIndex = currentInstruction[1].u.operand;
unsigned defaultOffset = currentInstruction[2].u.operand;
unsigned scrutinee = currentInstruction[3].u.operand;

// Create a jump table for the switch destinations, and track this switch statement.
SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

JITStubCall stubCall(this, cti_op_switch_char);
stubCall.addArgument(scrutinee);
stubCall.addArgument(Imm32(tableIndex));
stubCall.call();
jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
unsigned tableIndex = currentInstruction[1].u.operand;
unsigned defaultOffset = currentInstruction[2].u.operand;
unsigned scrutinee = currentInstruction[3].u.operand;

// Create a jump table for the switch destinations, and track this switch statement.
StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

JITStubCall stubCall(this, cti_op_switch_string);
stubCall.addArgument(scrutinee);
stubCall.addArgument(Imm32(tableIndex));
stubCall.call();
jump(regT0);
}

void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
unsigned message = currentInstruction[1].u.operand;

JITStubCall stubCall(this, cti_op_throw_reference_error);
stubCall.addArgument(m_codeBlock->getConstant(message));
stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
UNUSED_PARAM(currentInstruction);
breakpoint();
#else
JITStubCall stubCall(this, cti_op_debug);
stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
stubCall.call();
#endif
}

void JIT::emit_op_enter(Instruction*)
{
// Even though JIT code doesn't use them directly, we initialize the local
// variable registers to zap stale pointers, to avoid unnecessarily
// prolonging object lifetime and increasing GC pressure.
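// On JSVALUE32_64, each emitStore below writes two 32-bit words per
// register: the UndefinedTag and a zero payload.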
for (int i = 0; i < m_codeBlock->m_numVars; ++i)
emitStore(i, jsUndefined());
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
unsigned activation = currentInstruction[1].u.operand;

Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
JITStubCall(this, cti_op_push_activation).call(activation);
activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
unsigned dst = currentInstruction[1].u.operand;

Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));

if (m_codeBlock->m_numParameters == 1)
JITStubCall(this, cti_op_create_arguments_no_params).call();
else
JITStubCall(this, cti_op_create_arguments).call();

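// Store the new arguments object both in the visible register and in the
// unmodified-arguments slot, so the original object can still be found
// later even if the script overwrites the local 'arguments' variable.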
emitStore(dst, regT1, regT0);
emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);

argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
unsigned dst = currentInstruction[1].u.operand;

emitStore(dst, JSValue());
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
int dst = currentInstruction[1].u.operand;
emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
emitStoreCell(dst, regT0);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
unsigned protoRegister = currentInstruction[2].u.operand;
emitLoad(protoRegister, regT1, regT0);
JITStubCall stubCall(this, cti_op_create_this);
stubCall.addArgument(regT1, regT0);
stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
unsigned thisRegister = currentInstruction[1].u.operand;

emitLoad(thisRegister, regT1, regT0);

addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));

loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
addSlowCase(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));

map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}

void JIT::emit_op_convert_this_strict(Instruction* currentInstruction)
{
unsigned thisRegister = currentInstruction[1].u.operand;

emitLoad(thisRegister, regT1, regT0);

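// Strict mode does not wrap 'this': an empty (not-yet-initialized) value is
// replaced with null here, immediates and non-object cells pass through
// untouched, and only cells needing host this-conversion take the slow case.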
Jump notNull = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
emitStore(thisRegister, jsNull());
Jump setThis = jump();
notNull.link(this);
Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
Jump notAnObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
addSlowCase(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
isImmediate.link(this);
notAnObject.link(this);
setThis.link(this);
map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this_strict), thisRegister, regT1, regT0);
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
unsigned thisRegister = currentInstruction[1].u.operand;

linkSlowCase(iter);
linkSlowCase(iter);

JITStubCall stubCall(this, cti_op_convert_this);
stubCall.addArgument(regT1, regT0);
stubCall.call(thisRegister);
}

void JIT::emitSlow_op_convert_this_strict(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
unsigned thisRegister = currentInstruction[1].u.operand;

linkSlowCase(iter);

JITStubCall stubCall(this, cti_op_convert_this_strict);
stubCall.addArgument(regT1, regT0);
stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
Jump noProfiler = branchTestPtr(Zero, Address(regT2));

JITStubCall stubCall(this, cti_op_profile_will_call);
stubCall.addArgument(currentInstruction[1].u.operand);
stubCall.call();
noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
Jump noProfiler = branchTestPtr(Zero, Address(regT2));

JITStubCall stubCall(this, cti_op_profile_did_call);
stubCall.addArgument(currentInstruction[1].u.operand);
stubCall.call();
noProfiler.link(this);
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
int dst = currentInstruction[1].u.operand;
int argumentsRegister = currentInstruction[2].u.operand;
addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
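// ArgumentCount in the call frame header includes 'this', so the arguments
// object's length is one less.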
emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
sub32(TrustedImm32(1), regT0);
emitStoreInt32(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
linkSlowCase(iter);
int dst = currentInstruction[1].u.operand;
int base = currentInstruction[2].u.operand;
int ident = currentInstruction[3].u.operand;

JITStubCall stubCall(this, cti_op_get_by_id_generic);
stubCall.addArgument(base);
stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
stubCall.call(dst);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
int dst = currentInstruction[1].u.operand;
int argumentsRegister = currentInstruction[2].u.operand;
int property = currentInstruction[3].u.operand;
addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
emitLoad(property, regT1, regT2);
addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
add32(TrustedImm32(1), regT2);
// regT2 now contains the integer index of the argument we want, including this
emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT3);
addSlowCase(branch32(AboveOrEqual, regT2, regT3));

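// Arguments live at negative offsets from the call frame. The first numArgs
// slots (the declared parameters, including 'this') sit directly below the
// call frame header; when the caller passed more arguments than were
// declared, the full argument run was written further down, displaced by an
// extra ArgumentCount * sizeof(Register), which the second addressing path
// below accounts for.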
Jump skipOutofLineParams;
int numArgs = m_codeBlock->m_numParameters;
if (numArgs) {
Jump notInInPlaceArgs = branch32(AboveOrEqual, regT2, Imm32(numArgs));
addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
skipOutofLineParams = jump();
notInInPlaceArgs.link(this);
}

addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
mul32(TrustedImm32(sizeof(Register)), regT3, regT3);
subPtr(regT3, regT1);
loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
if (numArgs)
skipOutofLineParams.link(this);
emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
unsigned dst = currentInstruction[1].u.operand;
unsigned arguments = currentInstruction[2].u.operand;
unsigned property = currentInstruction[3].u.operand;

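// First slow case: the arguments object already exists (its tag was not
// EmptyValueTag), so jump straight to the generic get_by_val stub. The
// remaining slow cases (non-int32 index, index out of range) arrive with
// the register still empty and must create the arguments object first.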
linkSlowCase(iter);
Jump skipArgumentsCreation = jump();

linkSlowCase(iter);
linkSlowCase(iter);
if (m_codeBlock->m_numParameters == 1)
JITStubCall(this, cti_op_create_arguments_no_params).call();
else
JITStubCall(this, cti_op_create_arguments).call();

emitStore(arguments, regT1, regT0);
emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);

skipArgumentsCreation.link(this);
JITStubCall stubCall(this, cti_op_get_by_val);
stubCall.addArgument(arguments);
stubCall.addArgument(property);
stubCall.call(dst);
}

#if ENABLE(JIT_USE_SOFT_MODULO)
void JIT::softModulo()
{
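// Software 32-bit modulo for targets without a hardware divider. Takes the
// dividend in regT0 and the divisor in regT2, and returns regT0 % regT2 in
// regT0 using a shift-and-subtract loop; regT1 and regT3 are preserved via
// the stack.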
push(regT1);
push(regT3);
move(regT2, regT3);
move(regT0, regT2);
move(TrustedImm32(0), regT1);

// Take the absolute value of the divisor. Its sign never affects the sign
// of the remainder, so no flag is recorded for it.
Jump positiveRegT3 = branch32(GreaterThanOrEqual, regT3, TrustedImm32(0));
neg32(regT3);
positiveRegT3.link(this);

// Take the absolute value of the dividend, and record in regT1 that it was
// negative: the result of '%' takes the sign of the dividend.
Jump positiveRegT2 = branch32(GreaterThanOrEqual, regT2, TrustedImm32(0));
neg32(regT2);
xor32(TrustedImm32(2), regT1);
positiveRegT2.link(this);

// Save the negative-remainder flag across the division loop.
push(regT1);

Jump exitBranch = branch32(LessThan, regT2, regT3);

// Power of two fast case
move(regT3, regT0);
sub32(TrustedImm32(1), regT0);
Jump powerOfTwo = branchTest32(NonZero, regT0, regT3);
and32(regT0, regT2);
powerOfTwo.link(this);

and32(regT3, regT0);

Jump exitBranch2 = branchTest32(Zero, regT0);

countLeadingZeros32(regT2, regT0);
countLeadingZeros32(regT3, regT1);
sub32(regT0, regT1);

Jump useFullTable = branch32(Equal, regT1, TrustedImm32(31));

neg32(regT1);
add32(TrustedImm32(31), regT1);

int elementSizeByShift = -1;
#if CPU(ARM)
elementSizeByShift = 3;
#else
#error "JIT_OPTIMIZE_MOD not yet supported on this platform."
#endif
relativeTableJump(regT1, elementSizeByShift);

useFullTable.link(this);
// Modulo table
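// Each entry conditionally subtracts (divisor << i) from the dividend, for
// i = 31 down to 1; relativeTableJump above enters the table partway down,
// skipping shift amounts that are provably too large.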
for (int i = 31; i > 0; --i) {
#if CPU(ARM_TRADITIONAL)
m_assembler.cmp_r(regT2, m_assembler.lsl(regT3, i));
m_assembler.sub_r(regT2, regT2, m_assembler.lsl(regT3, i), ARMAssembler::CS);
#elif CPU(ARM_THUMB2)
ShiftTypeAndAmount shift(SRType_LSL, i);
m_assembler.sub_S(regT1, regT2, regT3, shift);
m_assembler.it(ARMv7Assembler::ConditionCS);
m_assembler.mov(regT2, regT1);
#else
#error "JIT_OPTIMIZE_MOD not yet supported on this platform."
#endif
}

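// Shift amount 0: one final conditional subtract of the divisor itself.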
Jump lower = branch32(Below, regT2, regT3);
sub32(regT3, regT2);
lower.link(this);

exitBranch.link(this);
exitBranch2.link(this);

// Apply the saved sign: negate the remainder if the dividend was negative.
pop(regT1);
Jump positiveResult = branch32(Equal, regT1, TrustedImm32(0));
neg32(regT2);
positiveResult.link(this);

move(regT2, regT0);

pop(regT3);
pop(regT1);
ret();
}
#endif // ENABLE(JIT_USE_SOFT_MODULO)

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)