2 * Copyright (C) 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
14 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
15 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
18 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
19 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
22 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
24 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include "JITInlineMethods.h"
34 #include "JITStubCall.h"
37 #include "JSFunction.h"
38 #include "JSPropertyNameIterator.h"
39 #include "LinkBuffer.h"
43 PassRefPtr
<ExecutableMemoryHandle
> JIT::privateCompileCTIMachineTrampolines(JSGlobalData
* globalData
, TrampolineStructure
*trampolines
)
45 // (1) This function provides fast property access for string length
46 Label stringLengthBegin
= align();
48 // regT0 holds payload, regT1 holds tag
50 Jump string_failureCases1
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
51 Jump string_failureCases2
= branchPtr(NotEqual
, Address(regT0
, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info
));
53 // Checks out okay! - get the length from the Ustring.
54 load32(Address(regT0
, OBJECT_OFFSETOF(JSString
, m_length
)), regT2
);
56 Jump string_failureCases3
= branch32(Above
, regT2
, TrustedImm32(INT_MAX
));
58 move(TrustedImm32(JSValue::Int32Tag
), regT1
);
62 JumpList callSlowCase
;
63 JumpList constructSlowCase
;
65 // VirtualCallLink Trampoline
66 // regT1, regT0 holds callee; callFrame is moved and partially initialized.
67 Label virtualCallLinkBegin
= align();
68 callSlowCase
.append(branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
)));
69 callSlowCase
.append(emitJumpIfNotType(regT0
, regT1
, JSFunctionType
));
71 // Finish canonical initialization before JS function call.
72 loadPtr(Address(regT0
, OBJECT_OFFSETOF(JSFunction
, m_scopeChain
)), regT1
);
73 emitPutCellToCallFrameHeader(regT1
, RegisterFile::ScopeChain
);
75 // Also initialize ReturnPC for use by lazy linking and exceptions.
76 preserveReturnAddressAfterCall(regT3
);
77 emitPutToCallFrameHeader(regT3
, RegisterFile::ReturnPC
);
79 storePtr(callFrameRegister
, &m_globalData
->topCallFrame
);
80 restoreArgumentReference();
81 Call callLazyLinkCall
= call();
82 restoreReturnAddressBeforeReturn(regT3
);
85 // VirtualConstructLink Trampoline
86 // regT1, regT0 holds callee; callFrame is moved and partially initialized.
87 Label virtualConstructLinkBegin
= align();
88 constructSlowCase
.append(branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
)));
89 constructSlowCase
.append(emitJumpIfNotType(regT0
, regT1
, JSFunctionType
));
91 // Finish canonical initialization before JS function call.
92 loadPtr(Address(regT0
, OBJECT_OFFSETOF(JSFunction
, m_scopeChain
)), regT1
);
93 emitPutCellToCallFrameHeader(regT1
, RegisterFile::ScopeChain
);
95 // Also initialize ReturnPC for use by lazy linking and exeptions.
96 preserveReturnAddressAfterCall(regT3
);
97 emitPutToCallFrameHeader(regT3
, RegisterFile::ReturnPC
);
99 storePtr(callFrameRegister
, &m_globalData
->topCallFrame
);
100 restoreArgumentReference();
101 Call callLazyLinkConstruct
= call();
102 restoreReturnAddressBeforeReturn(regT3
);
105 // VirtualCall Trampoline
106 // regT1, regT0 holds callee; regT2 will hold the FunctionExecutable.
107 Label virtualCallBegin
= align();
108 callSlowCase
.append(branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
)));
109 callSlowCase
.append(emitJumpIfNotType(regT0
, regT1
, JSFunctionType
));
111 // Finish canonical initialization before JS function call.
112 loadPtr(Address(regT0
, OBJECT_OFFSETOF(JSFunction
, m_scopeChain
)), regT1
);
113 emitPutCellToCallFrameHeader(regT1
, RegisterFile::ScopeChain
);
115 loadPtr(Address(regT0
, OBJECT_OFFSETOF(JSFunction
, m_executable
)), regT2
);
116 Jump hasCodeBlock1
= branch32(GreaterThanOrEqual
, Address(regT2
, OBJECT_OFFSETOF(FunctionExecutable
, m_numParametersForCall
)), TrustedImm32(0));
117 preserveReturnAddressAfterCall(regT3
);
119 storePtr(callFrameRegister
, &m_globalData
->topCallFrame
);
120 restoreArgumentReference();
121 Call callCompileCall
= call();
122 restoreReturnAddressBeforeReturn(regT3
);
123 loadPtr(Address(regT0
, OBJECT_OFFSETOF(JSFunction
, m_executable
)), regT2
);
125 hasCodeBlock1
.link(this);
126 loadPtr(Address(regT2
, OBJECT_OFFSETOF(FunctionExecutable
, m_jitCodeForCallWithArityCheck
)), regT0
);
129 // VirtualConstruct Trampoline
130 // regT1, regT0 holds callee; regT2 will hold the FunctionExecutable.
131 Label virtualConstructBegin
= align();
132 constructSlowCase
.append(branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
)));
133 constructSlowCase
.append(emitJumpIfNotType(regT0
, regT1
, JSFunctionType
));
135 // Finish canonical initialization before JS function call.
136 loadPtr(Address(regT0
, OBJECT_OFFSETOF(JSFunction
, m_scopeChain
)), regT1
);
137 emitPutCellToCallFrameHeader(regT1
, RegisterFile::ScopeChain
);
139 loadPtr(Address(regT0
, OBJECT_OFFSETOF(JSFunction
, m_executable
)), regT2
);
140 Jump hasCodeBlock2
= branch32(GreaterThanOrEqual
, Address(regT2
, OBJECT_OFFSETOF(FunctionExecutable
, m_numParametersForConstruct
)), TrustedImm32(0));
141 preserveReturnAddressAfterCall(regT3
);
143 storePtr(callFrameRegister
, &m_globalData
->topCallFrame
);
144 restoreArgumentReference();
145 Call callCompileConstruct
= call();
146 restoreReturnAddressBeforeReturn(regT3
);
147 loadPtr(Address(regT0
, OBJECT_OFFSETOF(JSFunction
, m_executable
)), regT2
);
149 hasCodeBlock2
.link(this);
150 loadPtr(Address(regT2
, OBJECT_OFFSETOF(FunctionExecutable
, m_jitCodeForConstructWithArityCheck
)), regT0
);
153 callSlowCase
.link(this);
154 // Finish canonical initialization before JS function call.
155 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame
, regT2
);
156 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain
, regT2
, regT2
);
157 emitPutCellToCallFrameHeader(regT2
, RegisterFile::ScopeChain
);
159 // Also initialize ReturnPC and CodeBlock, like a JS function would.
160 preserveReturnAddressAfterCall(regT3
);
161 emitPutToCallFrameHeader(regT3
, RegisterFile::ReturnPC
);
162 emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock
);
164 storePtr(callFrameRegister
, &m_globalData
->topCallFrame
);
165 restoreArgumentReference();
166 Call callCallNotJSFunction
= call();
167 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame
, callFrameRegister
);
168 restoreReturnAddressBeforeReturn(regT3
);
171 constructSlowCase
.link(this);
172 // Finish canonical initialization before JS function call.
173 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame
, regT2
);
174 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain
, regT2
, regT2
);
175 emitPutCellToCallFrameHeader(regT2
, RegisterFile::ScopeChain
);
177 // Also initialize ReturnPC and CodeBlock, like a JS function would.
178 preserveReturnAddressAfterCall(regT3
);
179 emitPutToCallFrameHeader(regT3
, RegisterFile::ReturnPC
);
180 emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock
);
182 storePtr(callFrameRegister
, &m_globalData
->topCallFrame
);
183 restoreArgumentReference();
184 Call callConstructNotJSFunction
= call();
185 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame
, callFrameRegister
);
186 restoreReturnAddressBeforeReturn(regT3
);
189 // NativeCall Trampoline
190 Label nativeCallThunk
= privateCompileCTINativeCall(globalData
);
191 Label nativeConstructThunk
= privateCompileCTINativeCall(globalData
, true);
193 Call string_failureCases1Call
= makeTailRecursiveCall(string_failureCases1
);
194 Call string_failureCases2Call
= makeTailRecursiveCall(string_failureCases2
);
195 Call string_failureCases3Call
= makeTailRecursiveCall(string_failureCases3
);
197 // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
198 LinkBuffer
patchBuffer(*m_globalData
, this, GLOBAL_THUNK_ID
);
200 patchBuffer
.link(string_failureCases1Call
, FunctionPtr(cti_op_get_by_id_string_fail
));
201 patchBuffer
.link(string_failureCases2Call
, FunctionPtr(cti_op_get_by_id_string_fail
));
202 patchBuffer
.link(string_failureCases3Call
, FunctionPtr(cti_op_get_by_id_string_fail
));
203 patchBuffer
.link(callLazyLinkCall
, FunctionPtr(cti_vm_lazyLinkCall
));
204 patchBuffer
.link(callLazyLinkConstruct
, FunctionPtr(cti_vm_lazyLinkConstruct
));
205 patchBuffer
.link(callCompileCall
, FunctionPtr(cti_op_call_jitCompile
));
206 patchBuffer
.link(callCompileConstruct
, FunctionPtr(cti_op_construct_jitCompile
));
207 patchBuffer
.link(callCallNotJSFunction
, FunctionPtr(cti_op_call_NotJSFunction
));
208 patchBuffer
.link(callConstructNotJSFunction
, FunctionPtr(cti_op_construct_NotJSConstruct
));
210 CodeRef finalCode
= patchBuffer
.finalizeCode();
211 RefPtr
<ExecutableMemoryHandle
> executableMemory
= finalCode
.executableMemory();
213 trampolines
->ctiVirtualCallLink
= patchBuffer
.trampolineAt(virtualCallLinkBegin
);
214 trampolines
->ctiVirtualConstructLink
= patchBuffer
.trampolineAt(virtualConstructLinkBegin
);
215 trampolines
->ctiVirtualCall
= patchBuffer
.trampolineAt(virtualCallBegin
);
216 trampolines
->ctiVirtualConstruct
= patchBuffer
.trampolineAt(virtualConstructBegin
);
217 trampolines
->ctiNativeCall
= patchBuffer
.trampolineAt(nativeCallThunk
);
218 trampolines
->ctiNativeConstruct
= patchBuffer
.trampolineAt(nativeConstructThunk
);
219 trampolines
->ctiStringLengthTrampoline
= patchBuffer
.trampolineAt(stringLengthBegin
);
221 return executableMemory
.release();
224 JIT::Label
JIT::privateCompileCTINativeCall(JSGlobalData
* globalData
, bool isConstruct
)
226 int executableOffsetToFunction
= isConstruct
? OBJECT_OFFSETOF(NativeExecutable
, m_constructor
) : OBJECT_OFFSETOF(NativeExecutable
, m_function
);
228 Label nativeCallThunk
= align();
230 emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock
);
231 storePtr(callFrameRegister
, &m_globalData
->topCallFrame
);
234 // Load caller frame's scope chain into this callframe so that whatever we call can
235 // get to its global data.
236 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame
, regT0
);
237 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain
, regT1
, regT0
);
238 emitPutCellToCallFrameHeader(regT1
, RegisterFile::ScopeChain
);
241 emitPutToCallFrameHeader(regT1
, RegisterFile::ReturnPC
);
243 // Calling convention: f(ecx, edx, ...);
244 // Host function signature: f(ExecState*);
245 move(callFrameRegister
, X86Registers::ecx
);
247 subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister
); // Align stack after call.
250 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee
, regT1
);
251 loadPtr(Address(regT1
, OBJECT_OFFSETOF(JSFunction
, m_executable
)), regT1
);
252 move(regT0
, callFrameRegister
); // Eagerly restore caller frame register to avoid loading from stack.
253 call(Address(regT1
, executableOffsetToFunction
));
255 addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister
);
258 // Load caller frame's scope chain into this callframe so that whatever we call can
259 // get to its global data.
260 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame
, regT2
);
261 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain
, regT1
, regT2
);
262 emitPutCellToCallFrameHeader(regT1
, RegisterFile::ScopeChain
);
264 preserveReturnAddressAfterCall(regT3
); // Callee preserved
265 emitPutToCallFrameHeader(regT3
, RegisterFile::ReturnPC
);
267 // Calling convention: f(r0 == regT0, r1 == regT1, ...);
268 // Host function signature: f(ExecState*);
269 move(callFrameRegister
, ARMRegisters::r0
);
272 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee
, ARMRegisters::r1
);
273 move(regT2
, callFrameRegister
); // Eagerly restore caller frame register to avoid loading from stack.
274 loadPtr(Address(ARMRegisters::r1
, OBJECT_OFFSETOF(JSFunction
, m_executable
)), regT2
);
275 call(Address(regT2
, executableOffsetToFunction
));
277 restoreReturnAddressBeforeReturn(regT3
);
279 // Load caller frame's scope chain into this callframe so that whatever we call can
280 // get to its global data.
281 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame
, regT2
);
282 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain
, regT1
, regT2
);
283 emitPutCellToCallFrameHeader(regT1
, RegisterFile::ScopeChain
);
285 preserveReturnAddressAfterCall(regT3
); // Callee preserved
286 emitPutToCallFrameHeader(regT3
, RegisterFile::ReturnPC
);
288 // Calling convention: f(r0 == regT4, r1 == regT5, ...);
289 // Host function signature: f(ExecState*);
290 move(callFrameRegister
, regT4
);
292 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee
, regT5
);
293 move(regT2
, callFrameRegister
); // Eagerly restore caller frame register to avoid loading from stack.
294 loadPtr(Address(regT5
, OBJECT_OFFSETOF(JSFunction
, m_executable
)), regT2
);
296 call(Address(regT2
, executableOffsetToFunction
), regT0
);
297 restoreReturnAddressBeforeReturn(regT3
);
299 // Load caller frame's scope chain into this callframe so that whatever we call can
300 // get to its global data.
301 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame
, regT0
);
302 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain
, regT1
, regT0
);
303 emitPutCellToCallFrameHeader(regT1
, RegisterFile::ScopeChain
);
305 preserveReturnAddressAfterCall(regT3
); // Callee preserved
306 emitPutToCallFrameHeader(regT3
, RegisterFile::ReturnPC
);
308 // Calling convention: f(a0, a1, a2, a3);
309 // Host function signature: f(ExecState*);
311 // Allocate stack space for 16 bytes (8-byte aligned)
312 // 16 bytes (unused) for 4 arguments
313 subPtr(TrustedImm32(16), stackPointerRegister
);
316 move(callFrameRegister
, MIPSRegisters::a0
);
319 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee
, MIPSRegisters::a2
);
320 loadPtr(Address(MIPSRegisters::a2
, OBJECT_OFFSETOF(JSFunction
, m_executable
)), regT2
);
321 move(regT0
, callFrameRegister
); // Eagerly restore caller frame register to avoid loading from stack.
322 call(Address(regT2
, executableOffsetToFunction
));
324 // Restore stack space
325 addPtr(TrustedImm32(16), stackPointerRegister
);
327 restoreReturnAddressBeforeReturn(regT3
);
330 #error "JIT not supported on this platform."
331 UNUSED_PARAM(executableOffsetToFunction
);
335 // Check for an exception
336 Jump sawException
= branch32(NotEqual
, AbsoluteAddress(reinterpret_cast<char*>(&globalData
->exception
) + OBJECT_OFFSETOF(JSValue
, u
.asBits
.tag
)), TrustedImm32(JSValue::EmptyValueTag
));
341 // Handle an exception
342 sawException
.link(this);
344 // Grab the return address.
345 preserveReturnAddressAfterCall(regT1
);
347 move(TrustedImmPtr(&globalData
->exceptionLocation
), regT2
);
348 storePtr(regT1
, regT2
);
349 poke(callFrameRegister
, OBJECT_OFFSETOF(struct JITStackFrame
, callFrame
) / sizeof(void*));
351 storePtr(callFrameRegister
, &m_globalData
->topCallFrame
);
352 // Set the return address.
353 move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline
).value()), regT1
);
354 restoreReturnAddressBeforeReturn(regT1
);
358 return nativeCallThunk
;
361 JIT::CodeRef
JIT::privateCompileCTINativeCall(JSGlobalData
* globalData
, NativeFunction func
)
365 emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock
);
366 storePtr(callFrameRegister
, &m_globalData
->topCallFrame
);
369 // Load caller frame's scope chain into this callframe so that whatever we call can
370 // get to its global data.
371 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame
, regT0
);
372 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain
, regT1
, regT0
);
373 emitPutCellToCallFrameHeader(regT1
, RegisterFile::ScopeChain
);
376 emitPutToCallFrameHeader(regT1
, RegisterFile::ReturnPC
);
378 // Calling convention: f(ecx, edx, ...);
379 // Host function signature: f(ExecState*);
380 move(callFrameRegister
, X86Registers::ecx
);
382 subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister
); // Align stack after call.
384 move(regT0
, callFrameRegister
); // Eagerly restore caller frame register to avoid loading from stack.
389 addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister
);
392 // Load caller frame's scope chain into this callframe so that whatever we call can
393 // get to its global data.
394 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame
, regT2
);
395 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain
, regT1
, regT2
);
396 emitPutCellToCallFrameHeader(regT1
, RegisterFile::ScopeChain
);
398 preserveReturnAddressAfterCall(regT3
); // Callee preserved
399 emitPutToCallFrameHeader(regT3
, RegisterFile::ReturnPC
);
401 // Calling convention: f(r0 == regT0, r1 == regT1, ...);
402 // Host function signature: f(ExecState*);
403 move(callFrameRegister
, ARMRegisters::r0
);
405 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee
, ARMRegisters::r1
);
406 move(regT2
, callFrameRegister
); // Eagerly restore caller frame register to avoid loading from stack.
407 loadPtr(Address(ARMRegisters::r1
, OBJECT_OFFSETOF(JSFunction
, m_executable
)), regT2
);
412 restoreReturnAddressBeforeReturn(regT3
);
415 // Load caller frame's scope chain into this callframe so that whatever we call can
416 // get to its global data.
417 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame
, regT0
);
418 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain
, regT1
, regT0
);
419 emitPutCellToCallFrameHeader(regT1
, RegisterFile::ScopeChain
);
421 preserveReturnAddressAfterCall(regT3
); // Callee preserved
422 emitPutToCallFrameHeader(regT3
, RegisterFile::ReturnPC
);
424 // Calling convention: f(a0, a1, a2, a3);
425 // Host function signature: f(ExecState*);
427 // Allocate stack space for 16 bytes (8-byte aligned)
428 // 16 bytes (unused) for 4 arguments
429 subPtr(TrustedImm32(16), stackPointerRegister
);
432 move(callFrameRegister
, MIPSRegisters::a0
);
435 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee
, MIPSRegisters::a2
);
436 loadPtr(Address(MIPSRegisters::a2
, OBJECT_OFFSETOF(JSFunction
, m_executable
)), regT2
);
437 move(regT0
, callFrameRegister
); // Eagerly restore caller frame register to avoid loading from stack.
442 // Restore stack space
443 addPtr(TrustedImm32(16), stackPointerRegister
);
445 restoreReturnAddressBeforeReturn(regT3
);
447 // Load caller frame's scope chain into this callframe so that whatever we call can
448 // get to its global data.
449 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame
, regT2
);
450 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain
, regT1
, regT2
);
451 emitPutCellToCallFrameHeader(regT1
, RegisterFile::ScopeChain
);
453 preserveReturnAddressAfterCall(regT3
); // Callee preserved
454 emitPutToCallFrameHeader(regT3
, RegisterFile::ReturnPC
);
456 // Calling convention: f(r0 == regT4, r1 == regT5, ...);
457 // Host function signature: f(ExecState*);
458 move(callFrameRegister
, regT4
);
460 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee
, regT5
);
461 move(regT2
, callFrameRegister
); // Eagerly restore caller frame register to avoid loading from stack.
462 loadPtr(Address(regT5
, OBJECT_OFFSETOF(JSFunction
, m_executable
)), regT2
);
467 restoreReturnAddressBeforeReturn(regT3
);
469 #error "JIT not supported on this platform."
473 // Check for an exception
474 Jump sawException
= branch32(NotEqual
, AbsoluteAddress(reinterpret_cast<char*>(&globalData
->exception
) + OBJECT_OFFSETOF(JSValue
, u
.asBits
.tag
)), TrustedImm32(JSValue::EmptyValueTag
));
479 // Handle an exception
480 sawException
.link(this);
482 // Grab the return address.
483 preserveReturnAddressAfterCall(regT1
);
485 move(TrustedImmPtr(&globalData
->exceptionLocation
), regT2
);
486 storePtr(regT1
, regT2
);
487 poke(callFrameRegister
, OBJECT_OFFSETOF(struct JITStackFrame
, callFrame
) / sizeof(void*));
489 storePtr(callFrameRegister
, &m_globalData
->topCallFrame
);
490 // Set the return address.
491 move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline
).value()), regT1
);
492 restoreReturnAddressBeforeReturn(regT1
);
496 // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
497 LinkBuffer
patchBuffer(*m_globalData
, this, GLOBAL_THUNK_ID
);
499 patchBuffer
.link(nativeCall
, FunctionPtr(func
));
500 return patchBuffer
.finalizeCode();
503 void JIT::emit_op_mov(Instruction
* currentInstruction
)
505 unsigned dst
= currentInstruction
[1].u
.operand
;
506 unsigned src
= currentInstruction
[2].u
.operand
;
508 if (m_codeBlock
->isConstantRegisterIndex(src
))
509 emitStore(dst
, getConstantOperand(src
));
511 emitLoad(src
, regT1
, regT0
);
512 emitStore(dst
, regT1
, regT0
);
513 map(m_bytecodeOffset
+ OPCODE_LENGTH(op_mov
), dst
, regT1
, regT0
);
517 void JIT::emit_op_end(Instruction
* currentInstruction
)
519 ASSERT(returnValueRegister
!= callFrameRegister
);
520 emitLoad(currentInstruction
[1].u
.operand
, regT1
, regT0
);
521 restoreReturnAddressBeforeReturn(Address(callFrameRegister
, RegisterFile::ReturnPC
* static_cast<int>(sizeof(Register
))));
525 void JIT::emit_op_jmp(Instruction
* currentInstruction
)
527 unsigned target
= currentInstruction
[1].u
.operand
;
528 addJump(jump(), target
);
531 void JIT::emit_op_new_object(Instruction
* currentInstruction
)
533 emitAllocateJSFinalObject(TrustedImmPtr(m_codeBlock
->globalObject()->emptyObjectStructure()), regT0
, regT1
);
535 emitStoreCell(currentInstruction
[1].u
.operand
, regT0
);
538 void JIT::emitSlow_op_new_object(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
541 JITStubCall(this, cti_op_new_object
).call(currentInstruction
[1].u
.operand
);
544 void JIT::emit_op_check_has_instance(Instruction
* currentInstruction
)
546 unsigned baseVal
= currentInstruction
[1].u
.operand
;
548 emitLoadPayload(baseVal
, regT0
);
550 // Check that baseVal is a cell.
551 emitJumpSlowCaseIfNotJSCell(baseVal
);
553 // Check that baseVal 'ImplementsHasInstance'.
554 loadPtr(Address(regT0
, JSCell::structureOffset()), regT0
);
555 addSlowCase(branchTest8(Zero
, Address(regT0
, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance
)));
558 void JIT::emit_op_instanceof(Instruction
* currentInstruction
)
560 unsigned dst
= currentInstruction
[1].u
.operand
;
561 unsigned value
= currentInstruction
[2].u
.operand
;
562 unsigned baseVal
= currentInstruction
[3].u
.operand
;
563 unsigned proto
= currentInstruction
[4].u
.operand
;
565 // Load the operands into registers.
566 // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
567 emitLoadPayload(value
, regT2
);
568 emitLoadPayload(baseVal
, regT0
);
569 emitLoadPayload(proto
, regT1
);
571 // Check that proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
572 emitJumpSlowCaseIfNotJSCell(value
);
573 emitJumpSlowCaseIfNotJSCell(proto
);
575 // Check that prototype is an object
576 loadPtr(Address(regT1
, JSCell::structureOffset()), regT3
);
577 addSlowCase(emitJumpIfNotObject(regT3
));
579 // Fixme: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
580 // Check that baseVal 'ImplementsDefaultHasInstance'.
581 loadPtr(Address(regT0
, JSCell::structureOffset()), regT0
);
582 addSlowCase(branchTest8(Zero
, Address(regT0
, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance
)));
584 // Optimistically load the result true, and start looping.
585 // Initially, regT1 still contains proto and regT2 still contains value.
586 // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
587 move(TrustedImm32(1), regT0
);
590 // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
591 // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
592 loadPtr(Address(regT2
, JSCell::structureOffset()), regT2
);
593 load32(Address(regT2
, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue
, u
.asBits
.payload
)), regT2
);
594 Jump isInstance
= branchPtr(Equal
, regT2
, regT1
);
595 branchTest32(NonZero
, regT2
).linkTo(loop
, this);
597 // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
598 move(TrustedImm32(0), regT0
);
600 // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
601 isInstance
.link(this);
602 emitStoreBool(dst
, regT0
);
605 void JIT::emitSlow_op_check_has_instance(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
607 unsigned baseVal
= currentInstruction
[1].u
.operand
;
609 linkSlowCaseIfNotJSCell(iter
, baseVal
);
612 JITStubCall
stubCall(this, cti_op_check_has_instance
);
613 stubCall
.addArgument(baseVal
);
617 void JIT::emitSlow_op_instanceof(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
619 unsigned dst
= currentInstruction
[1].u
.operand
;
620 unsigned value
= currentInstruction
[2].u
.operand
;
621 unsigned baseVal
= currentInstruction
[3].u
.operand
;
622 unsigned proto
= currentInstruction
[4].u
.operand
;
624 linkSlowCaseIfNotJSCell(iter
, value
);
625 linkSlowCaseIfNotJSCell(iter
, proto
);
629 JITStubCall
stubCall(this, cti_op_instanceof
);
630 stubCall
.addArgument(value
);
631 stubCall
.addArgument(baseVal
);
632 stubCall
.addArgument(proto
);
636 void JIT::emit_op_is_undefined(Instruction
* currentInstruction
)
638 unsigned dst
= currentInstruction
[1].u
.operand
;
639 unsigned value
= currentInstruction
[2].u
.operand
;
641 emitLoad(value
, regT1
, regT0
);
642 Jump isCell
= branch32(Equal
, regT1
, TrustedImm32(JSValue::CellTag
));
644 compare32(Equal
, regT1
, TrustedImm32(JSValue::UndefinedTag
), regT0
);
648 loadPtr(Address(regT0
, JSCell::structureOffset()), regT1
);
649 test8(NonZero
, Address(regT1
, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined
), regT0
);
652 emitStoreBool(dst
, regT0
);
655 void JIT::emit_op_is_boolean(Instruction
* currentInstruction
)
657 unsigned dst
= currentInstruction
[1].u
.operand
;
658 unsigned value
= currentInstruction
[2].u
.operand
;
660 emitLoadTag(value
, regT0
);
661 compare32(Equal
, regT0
, TrustedImm32(JSValue::BooleanTag
), regT0
);
662 emitStoreBool(dst
, regT0
);
665 void JIT::emit_op_is_number(Instruction
* currentInstruction
)
667 unsigned dst
= currentInstruction
[1].u
.operand
;
668 unsigned value
= currentInstruction
[2].u
.operand
;
670 emitLoadTag(value
, regT0
);
671 add32(TrustedImm32(1), regT0
);
672 compare32(Below
, regT0
, TrustedImm32(JSValue::LowestTag
+ 1), regT0
);
673 emitStoreBool(dst
, regT0
);
676 void JIT::emit_op_is_string(Instruction
* currentInstruction
)
678 unsigned dst
= currentInstruction
[1].u
.operand
;
679 unsigned value
= currentInstruction
[2].u
.operand
;
681 emitLoad(value
, regT1
, regT0
);
682 Jump isNotCell
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
684 loadPtr(Address(regT0
, JSCell::structureOffset()), regT1
);
685 compare8(Equal
, Address(regT1
, Structure::typeInfoTypeOffset()), TrustedImm32(StringType
), regT0
);
688 isNotCell
.link(this);
689 move(TrustedImm32(0), regT0
);
692 emitStoreBool(dst
, regT0
);
695 void JIT::emit_op_tear_off_activation(Instruction
* currentInstruction
)
697 unsigned activation
= currentInstruction
[1].u
.operand
;
698 unsigned arguments
= currentInstruction
[2].u
.operand
;
699 Jump activationCreated
= branch32(NotEqual
, tagFor(activation
), TrustedImm32(JSValue::EmptyValueTag
));
700 Jump argumentsNotCreated
= branch32(Equal
, tagFor(arguments
), TrustedImm32(JSValue::EmptyValueTag
));
701 activationCreated
.link(this);
702 JITStubCall
stubCall(this, cti_op_tear_off_activation
);
703 stubCall
.addArgument(currentInstruction
[1].u
.operand
);
704 stubCall
.addArgument(unmodifiedArgumentsRegister(currentInstruction
[2].u
.operand
));
706 argumentsNotCreated
.link(this);
709 void JIT::emit_op_tear_off_arguments(Instruction
* currentInstruction
)
711 int dst
= currentInstruction
[1].u
.operand
;
713 Jump argsNotCreated
= branch32(Equal
, tagFor(unmodifiedArgumentsRegister(dst
)), TrustedImm32(JSValue::EmptyValueTag
));
714 JITStubCall
stubCall(this, cti_op_tear_off_arguments
);
715 stubCall
.addArgument(unmodifiedArgumentsRegister(dst
));
717 argsNotCreated
.link(this);
720 void JIT::emit_op_resolve(Instruction
* currentInstruction
)
722 JITStubCall
stubCall(this, cti_op_resolve
);
723 stubCall
.addArgument(TrustedImmPtr(&m_codeBlock
->identifier(currentInstruction
[2].u
.operand
)));
724 stubCall
.callWithValueProfiling(currentInstruction
[1].u
.operand
);
727 void JIT::emit_op_to_primitive(Instruction
* currentInstruction
)
729 int dst
= currentInstruction
[1].u
.operand
;
730 int src
= currentInstruction
[2].u
.operand
;
732 emitLoad(src
, regT1
, regT0
);
734 Jump isImm
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
735 addSlowCase(branchPtr(NotEqual
, Address(regT0
, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info
)));
739 emitStore(dst
, regT1
, regT0
);
740 map(m_bytecodeOffset
+ OPCODE_LENGTH(op_to_primitive
), dst
, regT1
, regT0
);
743 void JIT::emitSlow_op_to_primitive(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
745 int dst
= currentInstruction
[1].u
.operand
;
749 JITStubCall
stubCall(this, cti_op_to_primitive
);
750 stubCall
.addArgument(regT1
, regT0
);
754 void JIT::emit_op_strcat(Instruction
* currentInstruction
)
756 JITStubCall
stubCall(this, cti_op_strcat
);
757 stubCall
.addArgument(TrustedImm32(currentInstruction
[2].u
.operand
));
758 stubCall
.addArgument(TrustedImm32(currentInstruction
[3].u
.operand
));
759 stubCall
.call(currentInstruction
[1].u
.operand
);
762 void JIT::emit_op_resolve_base(Instruction
* currentInstruction
)
764 JITStubCall
stubCall(this, currentInstruction
[3].u
.operand
? cti_op_resolve_base_strict_put
: cti_op_resolve_base
);
765 stubCall
.addArgument(TrustedImmPtr(&m_codeBlock
->identifier(currentInstruction
[2].u
.operand
)));
766 stubCall
.callWithValueProfiling(currentInstruction
[1].u
.operand
);
769 void JIT::emit_op_ensure_property_exists(Instruction
* currentInstruction
)
771 JITStubCall
stubCall(this, cti_op_ensure_property_exists
);
772 stubCall
.addArgument(TrustedImm32(currentInstruction
[1].u
.operand
));
773 stubCall
.addArgument(TrustedImmPtr(&m_codeBlock
->identifier(currentInstruction
[2].u
.operand
)));
774 stubCall
.call(currentInstruction
[1].u
.operand
);
777 void JIT::emit_op_resolve_skip(Instruction
* currentInstruction
)
779 JITStubCall
stubCall(this, cti_op_resolve_skip
);
780 stubCall
.addArgument(TrustedImmPtr(&m_codeBlock
->identifier(currentInstruction
[2].u
.operand
)));
781 stubCall
.addArgument(TrustedImm32(currentInstruction
[3].u
.operand
));
782 stubCall
.callWithValueProfiling(currentInstruction
[1].u
.operand
);
785 void JIT::emit_op_resolve_global(Instruction
* currentInstruction
, bool dynamic
)
787 // FIXME: Optimize to use patching instead of so many memory accesses.
789 unsigned dst
= currentInstruction
[1].u
.operand
;
790 void* globalObject
= m_codeBlock
->globalObject();
792 unsigned currentIndex
= m_globalResolveInfoIndex
++;
793 GlobalResolveInfo
* resolveInfoAddress
= &m_codeBlock
->globalResolveInfo(currentIndex
);
797 move(TrustedImmPtr(globalObject
), regT0
);
798 move(TrustedImmPtr(resolveInfoAddress
), regT3
);
799 loadPtr(Address(regT3
, OBJECT_OFFSETOF(GlobalResolveInfo
, structure
)), regT1
);
800 addSlowCase(branchPtr(NotEqual
, regT1
, Address(regT0
, JSCell::structureOffset())));
803 loadPtr(Address(regT0
, OBJECT_OFFSETOF(JSGlobalObject
, m_propertyStorage
)), regT2
);
804 load32(Address(regT3
, OBJECT_OFFSETOF(GlobalResolveInfo
, offset
)), regT3
);
805 load32(BaseIndex(regT2
, regT3
, TimesEight
, OBJECT_OFFSETOF(JSValue
, u
.asBits
.payload
)), regT0
); // payload
806 load32(BaseIndex(regT2
, regT3
, TimesEight
, OBJECT_OFFSETOF(JSValue
, u
.asBits
.tag
)), regT1
); // tag
807 emitValueProfilingSite();
808 emitStore(dst
, regT1
, regT0
);
809 map(m_bytecodeOffset
+ (dynamic
? OPCODE_LENGTH(op_resolve_global_dynamic
) : OPCODE_LENGTH(op_resolve_global
)), dst
, regT1
, regT0
);
812 void JIT::emitSlow_op_resolve_global(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
814 unsigned dst
= currentInstruction
[1].u
.operand
;
815 Identifier
* ident
= &m_codeBlock
->identifier(currentInstruction
[2].u
.operand
);
817 unsigned currentIndex
= m_globalResolveInfoIndex
++;
820 JITStubCall
stubCall(this, cti_op_resolve_global
);
821 stubCall
.addArgument(TrustedImmPtr(ident
));
822 stubCall
.addArgument(TrustedImm32(currentIndex
));
823 stubCall
.callWithValueProfiling(dst
);
826 void JIT::emit_op_not(Instruction
* currentInstruction
)
828 unsigned dst
= currentInstruction
[1].u
.operand
;
829 unsigned src
= currentInstruction
[2].u
.operand
;
831 emitLoadTag(src
, regT0
);
833 emitLoad(src
, regT1
, regT0
);
834 addSlowCase(branch32(NotEqual
, regT1
, TrustedImm32(JSValue::BooleanTag
)));
835 xor32(TrustedImm32(1), regT0
);
837 emitStoreBool(dst
, regT0
, (dst
== src
));
840 void JIT::emitSlow_op_not(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
842 unsigned dst
= currentInstruction
[1].u
.operand
;
843 unsigned src
= currentInstruction
[2].u
.operand
;
847 JITStubCall
stubCall(this, cti_op_not
);
848 stubCall
.addArgument(src
);
852 void JIT::emit_op_jfalse(Instruction
* currentInstruction
)
854 unsigned cond
= currentInstruction
[1].u
.operand
;
855 unsigned target
= currentInstruction
[2].u
.operand
;
857 emitLoad(cond
, regT1
, regT0
);
859 ASSERT((JSValue::BooleanTag
+ 1 == JSValue::Int32Tag
) && !(JSValue::Int32Tag
+ 1));
860 addSlowCase(branch32(Below
, regT1
, TrustedImm32(JSValue::BooleanTag
)));
861 addJump(branchTest32(Zero
, regT0
), target
);
864 void JIT::emitSlow_op_jfalse(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
866 unsigned cond
= currentInstruction
[1].u
.operand
;
867 unsigned target
= currentInstruction
[2].u
.operand
;
871 if (supportsFloatingPoint()) {
872 // regT1 contains the tag from the hot path.
873 Jump notNumber
= branch32(Above
, regT1
, TrustedImm32(JSValue::LowestTag
));
875 emitLoadDouble(cond
, fpRegT0
);
876 emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0
, fpRegT1
), target
);
877 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse
));
879 notNumber
.link(this);
882 JITStubCall
stubCall(this, cti_op_jtrue
);
883 stubCall
.addArgument(cond
);
885 emitJumpSlowToHot(branchTest32(Zero
, regT0
), target
); // Inverted.
888 void JIT::emit_op_jtrue(Instruction
* currentInstruction
)
890 unsigned cond
= currentInstruction
[1].u
.operand
;
891 unsigned target
= currentInstruction
[2].u
.operand
;
893 emitLoad(cond
, regT1
, regT0
);
895 ASSERT((JSValue::BooleanTag
+ 1 == JSValue::Int32Tag
) && !(JSValue::Int32Tag
+ 1));
896 addSlowCase(branch32(Below
, regT1
, TrustedImm32(JSValue::BooleanTag
)));
897 addJump(branchTest32(NonZero
, regT0
), target
);
900 void JIT::emitSlow_op_jtrue(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
902 unsigned cond
= currentInstruction
[1].u
.operand
;
903 unsigned target
= currentInstruction
[2].u
.operand
;
907 if (supportsFloatingPoint()) {
908 // regT1 contains the tag from the hot path.
909 Jump notNumber
= branch32(Above
, regT1
, TrustedImm32(JSValue::LowestTag
));
911 emitLoadDouble(cond
, fpRegT0
);
912 emitJumpSlowToHot(branchDoubleNonZero(fpRegT0
, fpRegT1
), target
);
913 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue
));
915 notNumber
.link(this);
918 JITStubCall
stubCall(this, cti_op_jtrue
);
919 stubCall
.addArgument(cond
);
921 emitJumpSlowToHot(branchTest32(NonZero
, regT0
), target
);
924 void JIT::emit_op_jeq_null(Instruction
* currentInstruction
)
926 unsigned src
= currentInstruction
[1].u
.operand
;
927 unsigned target
= currentInstruction
[2].u
.operand
;
929 emitLoad(src
, regT1
, regT0
);
931 Jump isImmediate
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
933 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
934 loadPtr(Address(regT0
, JSCell::structureOffset()), regT2
);
935 addJump(branchTest8(NonZero
, Address(regT2
, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined
)), target
);
937 Jump wasNotImmediate
= jump();
939 // Now handle the immediate cases - undefined & null
940 isImmediate
.link(this);
942 ASSERT((JSValue::UndefinedTag
+ 1 == JSValue::NullTag
) && (JSValue::NullTag
& 0x1));
943 or32(TrustedImm32(1), regT1
);
944 addJump(branch32(Equal
, regT1
, TrustedImm32(JSValue::NullTag
)), target
);
946 wasNotImmediate
.link(this);
949 void JIT::emit_op_jneq_null(Instruction
* currentInstruction
)
951 unsigned src
= currentInstruction
[1].u
.operand
;
952 unsigned target
= currentInstruction
[2].u
.operand
;
954 emitLoad(src
, regT1
, regT0
);
956 Jump isImmediate
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
958 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
959 loadPtr(Address(regT0
, JSCell::structureOffset()), regT2
);
960 addJump(branchTest8(Zero
, Address(regT2
, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined
)), target
);
962 Jump wasNotImmediate
= jump();
964 // Now handle the immediate cases - undefined & null
965 isImmediate
.link(this);
967 ASSERT((JSValue::UndefinedTag
+ 1 == JSValue::NullTag
) && (JSValue::NullTag
& 0x1));
968 or32(TrustedImm32(1), regT1
);
969 addJump(branch32(NotEqual
, regT1
, TrustedImm32(JSValue::NullTag
)), target
);
971 wasNotImmediate
.link(this);
974 void JIT::emit_op_jneq_ptr(Instruction
* currentInstruction
)
976 unsigned src
= currentInstruction
[1].u
.operand
;
977 JSCell
* ptr
= currentInstruction
[2].u
.jsCell
.get();
978 unsigned target
= currentInstruction
[3].u
.operand
;
980 emitLoad(src
, regT1
, regT0
);
981 addJump(branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
)), target
);
982 addJump(branchPtr(NotEqual
, regT0
, TrustedImmPtr(ptr
)), target
);
985 void JIT::emit_op_eq(Instruction
* currentInstruction
)
987 unsigned dst
= currentInstruction
[1].u
.operand
;
988 unsigned src1
= currentInstruction
[2].u
.operand
;
989 unsigned src2
= currentInstruction
[3].u
.operand
;
991 emitLoad2(src1
, regT1
, regT0
, src2
, regT3
, regT2
);
992 addSlowCase(branch32(NotEqual
, regT1
, regT3
));
993 addSlowCase(branch32(Equal
, regT1
, TrustedImm32(JSValue::CellTag
)));
994 addSlowCase(branch32(Below
, regT1
, TrustedImm32(JSValue::LowestTag
)));
996 compare32(Equal
, regT0
, regT2
, regT0
);
998 emitStoreBool(dst
, regT0
);
1001 void JIT::emitSlow_op_eq(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
1003 unsigned dst
= currentInstruction
[1].u
.operand
;
1004 unsigned op1
= currentInstruction
[2].u
.operand
;
1005 unsigned op2
= currentInstruction
[3].u
.operand
;
1007 JumpList storeResult
;
1008 JumpList genericCase
;
1010 genericCase
.append(getSlowCase(iter
)); // tags not equal
1012 linkSlowCase(iter
); // tags equal and JSCell
1013 genericCase
.append(branchPtr(NotEqual
, Address(regT0
, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info
)));
1014 genericCase
.append(branchPtr(NotEqual
, Address(regT2
, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info
)));
1017 JITStubCall
stubCallEqStrings(this, cti_op_eq_strings
);
1018 stubCallEqStrings
.addArgument(regT0
);
1019 stubCallEqStrings
.addArgument(regT2
);
1020 stubCallEqStrings
.call();
1021 storeResult
.append(jump());
1024 genericCase
.append(getSlowCase(iter
)); // doubles
1025 genericCase
.link(this);
1026 JITStubCall
stubCallEq(this, cti_op_eq
);
1027 stubCallEq
.addArgument(op1
);
1028 stubCallEq
.addArgument(op2
);
1029 stubCallEq
.call(regT0
);
1031 storeResult
.link(this);
1032 emitStoreBool(dst
, regT0
);
1035 void JIT::emit_op_neq(Instruction
* currentInstruction
)
1037 unsigned dst
= currentInstruction
[1].u
.operand
;
1038 unsigned src1
= currentInstruction
[2].u
.operand
;
1039 unsigned src2
= currentInstruction
[3].u
.operand
;
1041 emitLoad2(src1
, regT1
, regT0
, src2
, regT3
, regT2
);
1042 addSlowCase(branch32(NotEqual
, regT1
, regT3
));
1043 addSlowCase(branch32(Equal
, regT1
, TrustedImm32(JSValue::CellTag
)));
1044 addSlowCase(branch32(Below
, regT1
, TrustedImm32(JSValue::LowestTag
)));
1046 compare32(NotEqual
, regT0
, regT2
, regT0
);
1048 emitStoreBool(dst
, regT0
);
1051 void JIT::emitSlow_op_neq(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
1053 unsigned dst
= currentInstruction
[1].u
.operand
;
1055 JumpList storeResult
;
1056 JumpList genericCase
;
1058 genericCase
.append(getSlowCase(iter
)); // tags not equal
1060 linkSlowCase(iter
); // tags equal and JSCell
1061 genericCase
.append(branchPtr(NotEqual
, Address(regT0
, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info
)));
1062 genericCase
.append(branchPtr(NotEqual
, Address(regT2
, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info
)));
1065 JITStubCall
stubCallEqStrings(this, cti_op_eq_strings
);
1066 stubCallEqStrings
.addArgument(regT0
);
1067 stubCallEqStrings
.addArgument(regT2
);
1068 stubCallEqStrings
.call(regT0
);
1069 storeResult
.append(jump());
1072 genericCase
.append(getSlowCase(iter
)); // doubles
1073 genericCase
.link(this);
1074 JITStubCall
stubCallEq(this, cti_op_eq
);
1075 stubCallEq
.addArgument(regT1
, regT0
);
1076 stubCallEq
.addArgument(regT3
, regT2
);
1077 stubCallEq
.call(regT0
);
1079 storeResult
.link(this);
1080 xor32(TrustedImm32(0x1), regT0
);
1081 emitStoreBool(dst
, regT0
);
1084 void JIT::compileOpStrictEq(Instruction
* currentInstruction
, CompileOpStrictEqType type
)
1086 unsigned dst
= currentInstruction
[1].u
.operand
;
1087 unsigned src1
= currentInstruction
[2].u
.operand
;
1088 unsigned src2
= currentInstruction
[3].u
.operand
;
1090 emitLoad2(src1
, regT1
, regT0
, src2
, regT3
, regT2
);
1092 // Bail if the tags differ, or are double.
1093 addSlowCase(branch32(NotEqual
, regT1
, regT3
));
1094 addSlowCase(branch32(Below
, regT1
, TrustedImm32(JSValue::LowestTag
)));
1096 // Jump to a slow case if both are strings.
1097 Jump notCell
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
1098 Jump firstNotString
= branchPtr(NotEqual
, Address(regT0
, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info
));
1099 addSlowCase(branchPtr(Equal
, Address(regT2
, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info
)));
1101 firstNotString
.link(this);
1103 // Simply compare the payloads.
1104 if (type
== OpStrictEq
)
1105 compare32(Equal
, regT0
, regT2
, regT0
);
1107 compare32(NotEqual
, regT0
, regT2
, regT0
);
1109 emitStoreBool(dst
, regT0
);
1112 void JIT::emit_op_stricteq(Instruction
* currentInstruction
)
1114 compileOpStrictEq(currentInstruction
, OpStrictEq
);
1117 void JIT::emitSlow_op_stricteq(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
1119 unsigned dst
= currentInstruction
[1].u
.operand
;
1120 unsigned src1
= currentInstruction
[2].u
.operand
;
1121 unsigned src2
= currentInstruction
[3].u
.operand
;
1127 JITStubCall
stubCall(this, cti_op_stricteq
);
1128 stubCall
.addArgument(src1
);
1129 stubCall
.addArgument(src2
);
1133 void JIT::emit_op_nstricteq(Instruction
* currentInstruction
)
1135 compileOpStrictEq(currentInstruction
, OpNStrictEq
);
1138 void JIT::emitSlow_op_nstricteq(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
1140 unsigned dst
= currentInstruction
[1].u
.operand
;
1141 unsigned src1
= currentInstruction
[2].u
.operand
;
1142 unsigned src2
= currentInstruction
[3].u
.operand
;
1148 JITStubCall
stubCall(this, cti_op_nstricteq
);
1149 stubCall
.addArgument(src1
);
1150 stubCall
.addArgument(src2
);
1154 void JIT::emit_op_eq_null(Instruction
* currentInstruction
)
1156 unsigned dst
= currentInstruction
[1].u
.operand
;
1157 unsigned src
= currentInstruction
[2].u
.operand
;
1159 emitLoad(src
, regT1
, regT0
);
1160 Jump isImmediate
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
1162 loadPtr(Address(regT0
, JSCell::structureOffset()), regT1
);
1163 test8(NonZero
, Address(regT1
, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined
), regT1
);
1165 Jump wasNotImmediate
= jump();
1167 isImmediate
.link(this);
1169 compare32(Equal
, regT1
, TrustedImm32(JSValue::NullTag
), regT2
);
1170 compare32(Equal
, regT1
, TrustedImm32(JSValue::UndefinedTag
), regT1
);
1173 wasNotImmediate
.link(this);
1175 emitStoreBool(dst
, regT1
);
1178 void JIT::emit_op_neq_null(Instruction
* currentInstruction
)
1180 unsigned dst
= currentInstruction
[1].u
.operand
;
1181 unsigned src
= currentInstruction
[2].u
.operand
;
1183 emitLoad(src
, regT1
, regT0
);
1184 Jump isImmediate
= branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
));
1186 loadPtr(Address(regT0
, JSCell::structureOffset()), regT1
);
1187 test8(Zero
, Address(regT1
, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined
), regT1
);
1189 Jump wasNotImmediate
= jump();
1191 isImmediate
.link(this);
1193 compare32(NotEqual
, regT1
, TrustedImm32(JSValue::NullTag
), regT2
);
1194 compare32(NotEqual
, regT1
, TrustedImm32(JSValue::UndefinedTag
), regT1
);
1195 and32(regT2
, regT1
);
1197 wasNotImmediate
.link(this);
1199 emitStoreBool(dst
, regT1
);
1202 void JIT::emit_op_resolve_with_base(Instruction
* currentInstruction
)
1204 JITStubCall
stubCall(this, cti_op_resolve_with_base
);
1205 stubCall
.addArgument(TrustedImmPtr(&m_codeBlock
->identifier(currentInstruction
[3].u
.operand
)));
1206 stubCall
.addArgument(TrustedImm32(currentInstruction
[1].u
.operand
));
1207 stubCall
.callWithValueProfiling(currentInstruction
[2].u
.operand
);
1210 void JIT::emit_op_resolve_with_this(Instruction
* currentInstruction
)
1212 JITStubCall
stubCall(this, cti_op_resolve_with_this
);
1213 stubCall
.addArgument(TrustedImmPtr(&m_codeBlock
->identifier(currentInstruction
[3].u
.operand
)));
1214 stubCall
.addArgument(TrustedImm32(currentInstruction
[1].u
.operand
));
1215 stubCall
.callWithValueProfiling(currentInstruction
[2].u
.operand
);
1218 void JIT::emit_op_throw(Instruction
* currentInstruction
)
1220 unsigned exception
= currentInstruction
[1].u
.operand
;
1221 JITStubCall
stubCall(this, cti_op_throw
);
1222 stubCall
.addArgument(exception
);
1226 // cti_op_throw always changes it's return address,
1227 // this point in the code should never be reached.
1232 void JIT::emit_op_get_pnames(Instruction
* currentInstruction
)
1234 int dst
= currentInstruction
[1].u
.operand
;
1235 int base
= currentInstruction
[2].u
.operand
;
1236 int i
= currentInstruction
[3].u
.operand
;
1237 int size
= currentInstruction
[4].u
.operand
;
1238 int breakTarget
= currentInstruction
[5].u
.operand
;
1240 JumpList isNotObject
;
1242 emitLoad(base
, regT1
, regT0
);
1243 if (!m_codeBlock
->isKnownNotImmediate(base
))
1244 isNotObject
.append(branch32(NotEqual
, regT1
, TrustedImm32(JSValue::CellTag
)));
1245 if (base
!= m_codeBlock
->thisRegister() || m_codeBlock
->isStrictMode()) {
1246 loadPtr(Address(regT0
, JSCell::structureOffset()), regT2
);
1247 isNotObject
.append(emitJumpIfNotObject(regT2
));
1250 // We could inline the case where you have a valid cache, but
1251 // this call doesn't seem to be hot.
1252 Label
isObject(this);
1253 JITStubCall
getPnamesStubCall(this, cti_op_get_pnames
);
1254 getPnamesStubCall
.addArgument(regT0
);
1255 getPnamesStubCall
.call(dst
);
1256 load32(Address(regT0
, OBJECT_OFFSETOF(JSPropertyNameIterator
, m_jsStringsSize
)), regT3
);
1257 store32(TrustedImm32(Int32Tag
), intTagFor(i
));
1258 store32(TrustedImm32(0), intPayloadFor(i
));
1259 store32(TrustedImm32(Int32Tag
), intTagFor(size
));
1260 store32(regT3
, payloadFor(size
));
1263 isNotObject
.link(this);
1264 addJump(branch32(Equal
, regT1
, TrustedImm32(JSValue::NullTag
)), breakTarget
);
1265 addJump(branch32(Equal
, regT1
, TrustedImm32(JSValue::UndefinedTag
)), breakTarget
);
1266 JITStubCall
toObjectStubCall(this, cti_to_object
);
1267 toObjectStubCall
.addArgument(regT1
, regT0
);
1268 toObjectStubCall
.call(base
);
1269 jump().linkTo(isObject
, this);
1274 void JIT::emit_op_next_pname(Instruction
* currentInstruction
)
1276 int dst
= currentInstruction
[1].u
.operand
;
1277 int base
= currentInstruction
[2].u
.operand
;
1278 int i
= currentInstruction
[3].u
.operand
;
1279 int size
= currentInstruction
[4].u
.operand
;
1280 int it
= currentInstruction
[5].u
.operand
;
1281 int target
= currentInstruction
[6].u
.operand
;
1283 JumpList callHasProperty
;
1286 load32(intPayloadFor(i
), regT0
);
1287 Jump end
= branch32(Equal
, regT0
, intPayloadFor(size
));
1290 loadPtr(payloadFor(it
), regT1
);
1291 loadPtr(Address(regT1
, OBJECT_OFFSETOF(JSPropertyNameIterator
, m_jsStrings
)), regT2
);
1292 load32(BaseIndex(regT2
, regT0
, TimesEight
), regT2
);
1293 store32(TrustedImm32(JSValue::CellTag
), tagFor(dst
));
1294 store32(regT2
, payloadFor(dst
));
1297 add32(TrustedImm32(1), regT0
);
1298 store32(regT0
, intPayloadFor(i
));
1300 // Verify that i is valid:
1301 loadPtr(payloadFor(base
), regT0
);
1303 // Test base's structure
1304 loadPtr(Address(regT0
, JSCell::structureOffset()), regT2
);
1305 callHasProperty
.append(branchPtr(NotEqual
, regT2
, Address(Address(regT1
, OBJECT_OFFSETOF(JSPropertyNameIterator
, m_cachedStructure
)))));
1307 // Test base's prototype chain
1308 loadPtr(Address(Address(regT1
, OBJECT_OFFSETOF(JSPropertyNameIterator
, m_cachedPrototypeChain
))), regT3
);
1309 loadPtr(Address(regT3
, OBJECT_OFFSETOF(StructureChain
, m_vector
)), regT3
);
1310 addJump(branchTestPtr(Zero
, Address(regT3
)), target
);
1312 Label
checkPrototype(this);
1313 callHasProperty
.append(branch32(Equal
, Address(regT2
, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue
, u
.asBits
.tag
)), TrustedImm32(JSValue::NullTag
)));
1314 loadPtr(Address(regT2
, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue
, u
.asBits
.payload
)), regT2
);
1315 loadPtr(Address(regT2
, JSCell::structureOffset()), regT2
);
1316 callHasProperty
.append(branchPtr(NotEqual
, regT2
, Address(regT3
)));
1317 addPtr(TrustedImm32(sizeof(Structure
*)), regT3
);
1318 branchTestPtr(NonZero
, Address(regT3
)).linkTo(checkPrototype
, this);
1321 addJump(jump(), target
);
1323 // Slow case: Ask the object if i is valid.
1324 callHasProperty
.link(this);
1325 loadPtr(addressFor(dst
), regT1
);
1326 JITStubCall
stubCall(this, cti_has_property
);
1327 stubCall
.addArgument(regT0
);
1328 stubCall
.addArgument(regT1
);
1331 // Test for valid key.
1332 addJump(branchTest32(NonZero
, regT0
), target
);
1333 jump().linkTo(begin
, this);
1339 void JIT::emit_op_push_scope(Instruction
* currentInstruction
)
1341 JITStubCall
stubCall(this, cti_op_push_scope
);
1342 stubCall
.addArgument(currentInstruction
[1].u
.operand
);
1343 stubCall
.call(currentInstruction
[1].u
.operand
);
1346 void JIT::emit_op_pop_scope(Instruction
*)
1348 JITStubCall(this, cti_op_pop_scope
).call();
1351 void JIT::emit_op_to_jsnumber(Instruction
* currentInstruction
)
1353 int dst
= currentInstruction
[1].u
.operand
;
1354 int src
= currentInstruction
[2].u
.operand
;
1356 emitLoad(src
, regT1
, regT0
);
1358 Jump isInt32
= branch32(Equal
, regT1
, TrustedImm32(JSValue::Int32Tag
));
1359 addSlowCase(branch32(AboveOrEqual
, regT1
, TrustedImm32(JSValue::EmptyValueTag
)));
1363 emitStore(dst
, regT1
, regT0
);
1364 map(m_bytecodeOffset
+ OPCODE_LENGTH(op_to_jsnumber
), dst
, regT1
, regT0
);
1367 void JIT::emitSlow_op_to_jsnumber(Instruction
* currentInstruction
, Vector
<SlowCaseEntry
>::iterator
& iter
)
1369 int dst
= currentInstruction
[1].u
.operand
;
1373 JITStubCall
stubCall(this, cti_op_to_jsnumber
);
1374 stubCall
.addArgument(regT1
, regT0
);
1378 void JIT::emit_op_push_new_scope(Instruction
* currentInstruction
)
1380 JITStubCall
stubCall(this, cti_op_push_new_scope
);
1381 stubCall
.addArgument(TrustedImmPtr(&m_codeBlock
->identifier(currentInstruction
[2].u
.operand
)));
1382 stubCall
.addArgument(currentInstruction
[3].u
.operand
);
1383 stubCall
.call(currentInstruction
[1].u
.operand
);
1386 void JIT::emit_op_catch(Instruction
* currentInstruction
)
1388 // cti_op_throw returns the callFrame for the handler.
1389 move(regT0
, callFrameRegister
);
1391 // Now store the exception returned by cti_op_throw.
1392 loadPtr(Address(stackPointerRegister
, OBJECT_OFFSETOF(struct JITStackFrame
, globalData
)), regT3
);
1393 load32(Address(regT3
, OBJECT_OFFSETOF(JSGlobalData
, exception
) + OBJECT_OFFSETOF(JSValue
, u
.asBits
.payload
)), regT0
);
1394 load32(Address(regT3
, OBJECT_OFFSETOF(JSGlobalData
, exception
) + OBJECT_OFFSETOF(JSValue
, u
.asBits
.tag
)), regT1
);
1395 store32(TrustedImm32(JSValue().payload()), Address(regT3
, OBJECT_OFFSETOF(JSGlobalData
, exception
) + OBJECT_OFFSETOF(JSValue
, u
.asBits
.payload
)));
1396 store32(TrustedImm32(JSValue().tag()), Address(regT3
, OBJECT_OFFSETOF(JSGlobalData
, exception
) + OBJECT_OFFSETOF(JSValue
, u
.asBits
.tag
)));
1398 unsigned exception
= currentInstruction
[1].u
.operand
;
1399 emitStore(exception
, regT1
, regT0
);
1400 map(m_bytecodeOffset
+ OPCODE_LENGTH(op_catch
), exception
, regT1
, regT0
);
1403 void JIT::emit_op_jmp_scopes(Instruction
* currentInstruction
)
1405 JITStubCall
stubCall(this, cti_op_jmp_scopes
);
1406 stubCall
.addArgument(TrustedImm32(currentInstruction
[1].u
.operand
));
1408 addJump(jump(), currentInstruction
[2].u
.operand
);
1411 void JIT::emit_op_switch_imm(Instruction
* currentInstruction
)
1413 unsigned tableIndex
= currentInstruction
[1].u
.operand
;
1414 unsigned defaultOffset
= currentInstruction
[2].u
.operand
;
1415 unsigned scrutinee
= currentInstruction
[3].u
.operand
;
1417 // create jump table for switch destinations, track this switch statement.
1418 SimpleJumpTable
* jumpTable
= &m_codeBlock
->immediateSwitchJumpTable(tableIndex
);
1419 m_switches
.append(SwitchRecord(jumpTable
, m_bytecodeOffset
, defaultOffset
, SwitchRecord::Immediate
));
1420 jumpTable
->ctiOffsets
.grow(jumpTable
->branchOffsets
.size());
1422 JITStubCall
stubCall(this, cti_op_switch_imm
);
1423 stubCall
.addArgument(scrutinee
);
1424 stubCall
.addArgument(TrustedImm32(tableIndex
));
1429 void JIT::emit_op_switch_char(Instruction
* currentInstruction
)
1431 unsigned tableIndex
= currentInstruction
[1].u
.operand
;
1432 unsigned defaultOffset
= currentInstruction
[2].u
.operand
;
1433 unsigned scrutinee
= currentInstruction
[3].u
.operand
;
1435 // create jump table for switch destinations, track this switch statement.
1436 SimpleJumpTable
* jumpTable
= &m_codeBlock
->characterSwitchJumpTable(tableIndex
);
1437 m_switches
.append(SwitchRecord(jumpTable
, m_bytecodeOffset
, defaultOffset
, SwitchRecord::Character
));
1438 jumpTable
->ctiOffsets
.grow(jumpTable
->branchOffsets
.size());
1440 JITStubCall
stubCall(this, cti_op_switch_char
);
1441 stubCall
.addArgument(scrutinee
);
1442 stubCall
.addArgument(TrustedImm32(tableIndex
));
1447 void JIT::emit_op_switch_string(Instruction
* currentInstruction
)
1449 unsigned tableIndex
= currentInstruction
[1].u
.operand
;
1450 unsigned defaultOffset
= currentInstruction
[2].u
.operand
;
1451 unsigned scrutinee
= currentInstruction
[3].u
.operand
;
1453 // create jump table for switch destinations, track this switch statement.
1454 StringJumpTable
* jumpTable
= &m_codeBlock
->stringSwitchJumpTable(tableIndex
);
1455 m_switches
.append(SwitchRecord(jumpTable
, m_bytecodeOffset
, defaultOffset
));
1457 JITStubCall
stubCall(this, cti_op_switch_string
);
1458 stubCall
.addArgument(scrutinee
);
1459 stubCall
.addArgument(TrustedImm32(tableIndex
));
1464 void JIT::emit_op_throw_reference_error(Instruction
* currentInstruction
)
1466 unsigned message
= currentInstruction
[1].u
.operand
;
1468 JITStubCall
stubCall(this, cti_op_throw_reference_error
);
1469 stubCall
.addArgument(m_codeBlock
->getConstant(message
));

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}
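
// Activation and arguments objects are created lazily: their registers are
// initialized to the empty value (see op_init_lazy_reg below), and the stub
// call is skipped when a previous execution of the op already filled them in.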

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;

    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall(this, cti_op_push_activation).call(activation);
    activationCreated.link(this);
}
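
// op_create_arguments stores the new object both in dst and in the matching
// "unmodified" register, so the VM can still find the original arguments
// object even if the script later overwrites the named 'arguments' variable.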

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));

    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(dst, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);

    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitStore(dst, JSValue());
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
    emitStoreCell(dst, regT0);
}
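
// op_create_this allocates the 'this' object for a constructor call inline
// when possible: the prototype must be an object that already has an
// inheritor ID (a cached Structure for objects inheriting from it);
// otherwise we bail to the slow path below.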

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(currentInstruction[2].u.operand, regT1);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    addSlowCase(emitJumpIfNotObject(regT1));

    // now we know that the prototype is an object, but we don't know if it's got an
    // inheritor ID
    loadPtr(Address(regT0, JSObject::offsetOfInheritorID()), regT2);
    addSlowCase(branchTestPtr(Zero, regT2));

    // now regT2 contains the inheritorID, which is the structure that the newly
    // allocated object will have.
    emitAllocateJSFinalObject(regT2, regT0, regT1);

    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand); // not a cell
    linkSlowCase(iter); // not an object
    linkSlowCase(iter); // doesn't have an inheritor ID
    linkSlowCase(iter); // allocation failed
    unsigned protoRegister = currentInstruction[2].u.operand;
    emitLoad(protoRegister, regT1, regT0);
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
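
// op_convert_this leaves 'this' untouched on the fast path: any cell that is
// not a string is already a usable this object. Strings and non-cells
// (including undefined and null) fall through to the slow path.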

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branchPtr(Equal, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));

    map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}
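
// In the slow path, undefined is mapped directly to the cached global this
// object; anything else (strings, null, numbers, booleans) is converted by
// the cti_op_convert_this stub.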

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalScopeChain()->globalThis.get();
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    Jump isNotUndefined = branch32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag));
    move(TrustedImmPtr(globalThis), regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitStore(thisRegister, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    isNotUndefined.link(this);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}
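
// The two profiler hooks below test the shared enabledProfilerReference slot
// in the JITStackFrame and skip the stub call entirely when no profiler is
// active.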

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}
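
// Fast path for 'arguments.length' when the arguments object has not been
// materialized: read the argument count straight from the call frame header
// and subtract one for 'this'.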

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    load32(payloadFor(RegisterFile::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    emitStoreInt32(dst, regT0);
}
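
// If the arguments object already exists, fall back to a generic 'length'
// property lookup on it.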

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.call(dst);
}
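
// Fast path for 'arguments[i]' without a materialized arguments object: check
// that i is an int32 within the argument count, then load the value's payload
// and tag directly from the register file relative to the call frame.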

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including this
    load32(payloadFor(RegisterFile::ArgumentCount), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT1);
    emitStore(dst, regT1, regT0);
}
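
// Three ways to reach the slow path: the arguments object already exists (use
// it directly), the index is not an int32, or the index is out of range; the
// latter two force the arguments object to be created before falling back to
// a generic get_by_val.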

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(arguments, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments);
    stubCall.addArgument(property);
    stubCall.call(dst);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)