/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE64)

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure* trampolines)
{
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (2) The second function provides fast property access for string length
    Label stringLengthBegin = align();

    // Check eax is a string
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);

    Jump string_failureCases3 = branch32(LessThan, regT0, TrustedImm32(0));

    // regT0 contains a 64-bit value (it is positive and zero-extended) so we don't need to sign-extend here.
    emitFastArithIntToImmNoCheck(regT0, regT0);

    ret();
#endif

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);

    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    JumpList callLinkFailures;
    Label virtualCallLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualConstructLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock4.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    // If the parser fails we want to be able to keep going,
    // so we handle this as a parse failure.
    callLinkFailures.link(this);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT1);
    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    poke(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, m_globalData->executableAllocator);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
#endif
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
#endif
}
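
// How the trampolines above fit together: LinkBuffer copies the generated
// code into executable memory, link() binds each Call to its C stub
// (cti_vm_lazyLinkCall, cti_op_call_jitCompile, etc.), and trampolineAt()
// resolves the entry Labels recorded above. At run time, a call to a
// not-yet-compiled JSFunction enters ctiVirtualCall / ctiVirtualCallLink,
// which sets up the callee's call frame, calls out to the stub to compile
// (and, for the link variants, patch the call site), then jumps to the code
// pointer returned in regT0. A zero regT0 means compilation failed;
// callLinkFailures unwinds to the caller frame and returns through
// ctiVMThrowTrampoline so the pending exception is thrown.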

JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86_64)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention: f(edi, esi, edx, ecx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::edi);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
    loadPtr(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_executable)), X86Registers::r9);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(X86Registers::r9, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif

    // Check for an exception
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Return.
    ret();

    // Handle an exception
    exceptionHandler.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}
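
// The native-call thunk bridges the JIT's conventions to the host ABI: the
// ExecState* argument is simply the callee CallFrame, moved into the first
// argument register of each target (edi on x86-64, r0 on ARM, a0 on MIPS)
// before calling through NativeExecutable's m_function / m_constructor slot.
// The x86-64 path also adjusts the stack pointer so the stack is aligned at
// the call, per the comment above; after the native function returns, a
// pending exception in globalData->exception diverts control to the
// ctiVMThrowTrampoline path instead of returning normally.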

JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool>, JSGlobalData* globalData, NativeFunction)
{
    return globalData->jitStubs->ctiNativeCall();
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src)) {
        storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
        if (dst == m_lastResultBytecodeRegister)
            killLastResultRegister();
    } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
        // If either the src or dst is the cached register go through
        // get/put registers to make sure we track this correctly.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        // Perform the copy via regT1; do not disturb any mapping in regT0.
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
        storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        int32_t op2imm = getConstantOperandImmediateInt(op2);
        addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThanOrEqual, regT0, regT1), target);
    }
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(baseVal, regT0);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(branch8(NotEqual, Address(regT3, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));

    // FIXME: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}
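
// The loop above is the default instanceof semantics. In JavaScript terms,
// roughly:
//
//     // value instanceof base, with proto == base.prototype:
//     for (var o = Object.getPrototypeOf(value); o; o = Object.getPrototypeOf(o)) {
//         if (o === proto)
//             return true;
//     }
//     return false;
//
// i.e. the first comparison is against value's own prototype (value itself is
// never compared), and the walk stops as soon as a prototype slot holds a
// non-cell, which can only be null.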

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    loadPtr(&globalObject->m_registers, regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    loadPtr(&globalObject->m_registers, regT0);
    storePtr(regT1, Address(regT0, currentInstruction[1].u.operand * sizeof(Register)));
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
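
// `skip` is the number of ScopeChainNodes between the current scope and the
// one holding the variable; a variable captured from the enclosing function
// compiles to skip == 1, from the function outside that to skip == 2, and so
// on. The first hop is emitted conditionally when the code block needs a full
// scope chain but may not have pushed its activation yet (the activation
// register still holds 0): in that case the top node does not exist and must
// not be skipped. op_put_scoped_var below walks the chain the same way and
// stores instead of loads.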

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);

    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSVariableObject, m_registers)), regT1);
    storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    Jump activationCreated = branchTestPtr(NonZero, addressFor(activation));
    Jump argumentsNotCreated = branchTestPtr(Zero, addressFor(arguments));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.call();
    argumentsNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branchTestPtr(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(dst))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst), regT2);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
{
    // Fast case
    void* globalObject = m_codeBlock->globalObject();
    unsigned currentIndex = m_globalResolveInfoIndex++;
    GlobalResolveInfo* resolveInfoAddress = &(m_codeBlock->globalResolveInfo(currentIndex));

    // Check Structure of global object
    move(TrustedImmPtr(globalObject), regT0);
    move(TrustedImmPtr(resolveInfoAddress), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset()))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT0);
    load32(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
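
// The xor trick works because in the JSValue immediate encoding ValueTrue
// and ValueFalse differ only in their low bit. After the first xor a boolean
// input has become 0 or 1; the branchTestPtr against ~1 sends every other
// input to the slow case; and xoring with ValueTrue re-applies the boolean
// tag with the low bit flipped, yielding the logical complement:
//
//     enc(false) ^ ValueFalse == 0,  then 0 ^ ValueTrue == enc(true)
//     enc(true)  ^ ValueFalse == 1,  then 1 ^ ValueTrue == enc(false)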

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}
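
// Both null-compare opcodes use the same immediate-encoding property:
// undefined and null differ only in TagBitUndefined, so masking that bit off
// with andPtr(~TagBitUndefined) maps undefined onto null and a single
// comparison against the encoded null covers both values that compare equal
// to null under ==. Cells are handled separately above because an object
// whose structure sets MasqueradesAsUndefined must also compare equal to null.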

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue(ptr)))), target);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
    killLastResultRegister();
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    killLastResultRegister();
}
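
// op_jsr / op_sret implement `finally` blocks as subroutines: op_jsr stores
// the address of the instruction following the jump into retAddrDst (the
// storePtrWithPatch/JSRInfo pair is resolved at link time, when label() is
// known) and jumps to the finally body; op_sret returns by jumping through
// that saved address.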

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitnot(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitJumpSlowCaseIfNotImmediateInteger(regT0);
    not32(regT0);
    emitFastArithIntToImmNoCheck(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    storePtr(tagTypeNumberRegister, payloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}
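
// op_get_pnames begins a for-in loop: it materializes the
// JSPropertyNameIterator into dst, zeroes the index i, stores the number of
// cached name strings into size, and branches straight to breakTarget when
// the base is null or undefined (for-in over those iterates nothing). Any
// other non-object base is converted with cti_to_object first, so the loop
// body below only ever sees objects.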

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}
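
// The inline path above is only sound while the iterator's cached snapshot
// still describes the base object: the base must keep m_cachedStructure, and
// each Structure* in the cached prototype chain's vector (a null-terminated
// array walked by checkPrototype) must still match the corresponding
// prototype's structure. Any mismatch means properties may have been added
// or removed since the snapshot, so the key is re-validated through
// cti_has_property before it is handed to the loop body.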

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));
    addSlowCase(emitJumpIfImmediateNumber(regT2));

    if (type == OpStrictEq)
        compare32(Equal, regT1, regT0, regT0);
    else
        compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}
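
// Strict (in)equality cannot compare numbers by their encoded bits: under
// JSVALUE64 the same numeric value may live as an immediate integer or as a
// boxed double, so comparing encodings would misreport 1 === 1.0. Numbers
// therefore take the slow case. Or-ing the operands into regT2 classifies
// the pair cheaply: the result looks like a cell only when both operands are
// cells (string comparison needs a call), and looks like a number when
// either operand is one; everything else is safe to compare bitwise inline.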

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, globalData) / sizeof(void*));
    loadPtr(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)), regT0);
    storePtr(TrustedImmPtr(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw_reference_error);
    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    test8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    comparePtr(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    test8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    comparePtr(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    emitPutVirtualRegister(dst);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    storePtr(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    addSlowCase(branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
}

void JIT::emit_op_convert_this_strict(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    Jump notNull = branchTestPtr(NonZero, regT0);
    move(TrustedImmPtr(JSValue::encode(jsNull())), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
    Jump setThis = jump();
    notNull.link(this);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump notAnObject = branch8(NotEqual, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
    addSlowCase(branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(NeedsThisConversion)));
    isImmediate.link(this);
    notAnObject.link(this);
    setThis.link(this);
}
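
// Strict-mode `this` is not coerced to an object: an empty (encoded-zero)
// value is normalized to null, immediates and ordinary objects pass through
// untouched, and only cells whose structure sets NeedsThisConversion fall
// back to cti_op_convert_this_strict. This contrasts with op_convert_this
// above, where any non-cell `this` is itself a slow case that gets wrapped.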
1252
1253 void JIT::emit_op_get_callee(Instruction* currentInstruction)
1254 {
1255 unsigned result = currentInstruction[1].u.operand;
1256 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
1257 emitPutVirtualRegister(result);
1258 }
1259
1260 void JIT::emit_op_create_this(Instruction* currentInstruction)
1261 {
1262 JITStubCall stubCall(this, cti_op_create_this);
1263 stubCall.addArgument(currentInstruction[2].u.operand, regT1);
1264 stubCall.call(currentInstruction[1].u.operand);
1265 }
1266
1267 void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
1268 {
1269 peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1270 Jump noProfiler = branchTestPtr(Zero, Address(regT1));
1271
1272 JITStubCall stubCall(this, cti_op_profile_will_call);
1273 stubCall.addArgument(currentInstruction[1].u.operand, regT1);
1274 stubCall.call();
1275 noProfiler.link(this);
1276
1277 }
1278
1279 void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1280 {
1281 peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1282 Jump noProfiler = branchTestPtr(Zero, Address(regT1));
1283
1284 JITStubCall stubCall(this, cti_op_profile_did_call);
1285 stubCall.addArgument(currentInstruction[1].u.operand, regT1);
1286 stubCall.call();
1287 noProfiler.link(this);
1288 }
1289
1290
1291 // Slow cases
1292
1293 void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1294 {
1295 linkSlowCase(iter);
1296 linkSlowCase(iter);
1297 JITStubCall stubCall(this, cti_op_convert_this);
1298 stubCall.addArgument(regT0);
1299 stubCall.call(currentInstruction[1].u.operand);
1300 }
1301
1302 void JIT::emitSlow_op_convert_this_strict(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1303 {
1304 linkSlowCase(iter);
1305 JITStubCall stubCall(this, cti_op_convert_this_strict);
1306 stubCall.addArgument(regT0);
1307 stubCall.call(currentInstruction[1].u.operand);
1308 }
1309
1310 void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1311 {
1312 linkSlowCase(iter);
1313
1314 JITStubCall stubCall(this, cti_op_to_primitive);
1315 stubCall.addArgument(regT0);
1316 stubCall.call(currentInstruction[1].u.operand);
1317 }
1318
1319 void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1320 {
1321 unsigned op2 = currentInstruction[2].u.operand;
1322 unsigned target = currentInstruction[3].u.operand;
1323 if (isOperandConstantImmediateInt(op2)) {
1324 linkSlowCase(iter);
1325 JITStubCall stubCall(this, cti_op_loop_if_lesseq);
1326 stubCall.addArgument(regT0);
1327 stubCall.addArgument(currentInstruction[2].u.operand, regT2);
1328 stubCall.call();
1329 emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
1330 } else {
1331 linkSlowCase(iter);
1332 linkSlowCase(iter);
1333 JITStubCall stubCall(this, cti_op_loop_if_lesseq);
1334 stubCall.addArgument(regT0);
1335 stubCall.addArgument(regT1);
1336 stubCall.call();
1337 emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
1338 }
1339 }
1340
1341 void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1342 {
1343 unsigned base = currentInstruction[1].u.operand;
1344 unsigned property = currentInstruction[2].u.operand;
1345 unsigned value = currentInstruction[3].u.operand;
1346
1347 linkSlowCase(iter); // property int32 check
1348 linkSlowCaseIfNotJSCell(iter, base); // base cell check
1349 linkSlowCase(iter); // base not array check
1350 linkSlowCase(iter); // in vector check
1351
1352 JITStubCall stubPutByValCall(this, cti_op_put_by_val);
1353 stubPutByValCall.addArgument(regT0);
1354 stubPutByValCall.addArgument(property, regT2);
1355 stubPutByValCall.addArgument(value, regT2);
1356 stubPutByValCall.call();
1357 }
1358
1359 void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1360 {
1361 linkSlowCase(iter);
1362 xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
1363 JITStubCall stubCall(this, cti_op_not);
1364 stubCall.addArgument(regT0);
1365 stubCall.call(currentInstruction[1].u.operand);
1366 }
1367
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

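// Slow path for op_neq. Likewise there is no cti_op_neq stub: we call
// cti_op_eq, then flip the low bit of the result before re-tagging it as a
// boolean immediate.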
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

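// Slow path for op_check_has_instance. The two slow cases correspond to the
// fast path's checks: the base value is not a cell, or its structure flags
// show it does not implement default hasInstance behaviour.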
void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call();
}

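// Slow path for op_instanceof, taken when value or proto is not a cell or
// when the fast path's remaining structure checks fail; the stub then
// carries out the complete instanceof semantics.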
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

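// op_get_arguments_length fast path. This is only valid while the arguments
// object has not been materialized (its register is still empty): the length
// is then the call frame's ArgumentCount minus one for 'this', re-tagged as
// an immediate int. The slow path below falls back to a generic get_by_id of
// the length property on the materialized arguments object.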
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}

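// op_get_argument_by_val fast path, valid only when no arguments object
// exists and the index is an immediate int within ArgumentCount. The index
// is bumped by one to account for 'this'; judging by the address arithmetic
// below, in-place parameter slots are used when the index is below
// m_numParameters, and the out-of-line copy of the arguments is used
// otherwise.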
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including 'this'.
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    Jump skipOutofLineParams;
    int numArgs = m_codeBlock->m_numParameters;
    if (numArgs) {
        Jump notInInPlaceArgs = branch32(AboveOrEqual, regT1, Imm32(numArgs));
        addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
        loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
        skipOutofLineParams = jump();
        notInInPlaceArgs.link(this);
    }

    addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
    mul32(TrustedImm32(sizeof(Register)), regT2, regT2);
    subPtr(regT2, regT0);
    loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
    if (numArgs)
        skipOutofLineParams.link(this);
    emitPutVirtualRegister(dst, regT0);
}

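// Slow path for op_get_argument_by_val. The first slow case fires when an
// arguments object already exists, so creation is skipped entirely. The
// other two (non-int or out-of-range index) materialize the arguments
// object, storing it in both the arguments register and its unmodified
// shadow register. All paths then perform a generic get_by_val.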
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}

#endif // USE(JSVALUE64)

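// op_resolve_global_dynamic walks 'skip' nodes of the scope chain, requiring
// each skipped node to be a JSActivation (verified by structure check); any
// dynamic scope, such as a 'with' object, fails the check and takes the slow
// path. If the function needs a full scope chain but its activation has not
// been created yet, the top-level check is bypassed. The walk then falls
// through to the normal op_resolve_global caching path.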
void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
{
    int skip = currentInstruction[5].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);

    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
    }
    emit_op_resolve_global(currentInstruction, true);
}

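// Slow path for op_resolve_global_dynamic. One slow case was registered per
// skipped scope-chain node; all of them funnel into a full cti_op_resolve
// and jump past the opcode. The final slow case covers a successful walk
// whose global property cache missed, handled by cti_op_resolve_global.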
void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
    int skip = currentInstruction[5].u.operand;
    while (skip--)
        linkSlowCase(iter);
    JITStubCall resolveStubCall(this, cti_op_resolve);
    resolveStubCall.addArgument(TrustedImmPtr(ident));
    resolveStubCall.call(dst);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.call(dst);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

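// op_load_varargs. Inline copying is attempted only when the code block
// declares no parameters beyond 'this' (expectedParams == 0), which
// guarantees the incoming arguments form a single contiguous stream. The
// inline path bails out if an arguments object exists, bounds-checks the
// register file, then copies ArgumentCount - 1 registers in a count-down
// loop (two word-sized copies per Register on 32-bit builds).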
void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;
    int registerOffset = currentInstruction[3].u.operand;
    ASSERT(argsOffset <= registerOffset);

    int expectedParams = m_codeBlock->m_numParameters - 1;
    // Don't do inline copying if we aren't guaranteed to have a single stream
    // of arguments.
    if (expectedParams) {
        JITStubCall stubCall(this, cti_op_load_varargs);
        stubCall.addArgument(Imm32(argsOffset));
        stubCall.call();
        // Stores a naked int32 in the register file.
        store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
        return;
    }

#if USE(JSVALUE32_64)
    addSlowCase(branch32(NotEqual, tagFor(argsOffset), TrustedImm32(JSValue::EmptyValueTag)));
#else
    addSlowCase(branchTestPtr(NonZero, addressFor(argsOffset)));
#endif
    // Load arg count into regT0.
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
    store32(regT0, intPayloadFor(argCountDst));
    Jump endBranch = branch32(Equal, regT0, TrustedImm32(1));

    mul32(TrustedImm32(sizeof(Register)), regT0, regT3);
    addPtr(TrustedImm32(static_cast<unsigned>(sizeof(Register) - RegisterFile::CallFrameHeaderSize * sizeof(Register))), callFrameRegister, regT1);
    subPtr(regT3, regT1); // regT1 is now the start of the out of line arguments
    addPtr(Imm32(argsOffset * sizeof(Register)), callFrameRegister, regT2); // regT2 is the target buffer

    // Bounds check the register file.
    addPtr(regT2, regT3);
    addPtr(Imm32((registerOffset - argsOffset) * sizeof(Register)), regT3);
    addSlowCase(branchPtr(Below, AbsoluteAddress(m_globalData->interpreter->registerFile().addressOfEnd()), regT3));

    sub32(TrustedImm32(1), regT0);
    Label loopStart = label();
    loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(0 - 2 * sizeof(Register))), regT3);
    storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(0 - sizeof(Register))));
#if USE(JSVALUE32_64)
    loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - 2 * sizeof(Register))), regT3);
    storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - sizeof(Register))));
#endif
    branchSubPtr(NonZero, TrustedImm32(1), regT0).linkTo(loopStart, this);
    endBranch.link(this);
}

void JIT::emitSlow_op_load_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;
    int expectedParams = m_codeBlock->m_numParameters - 1;
    if (expectedParams)
        return;

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();

    store32(TrustedImm32(Int32Tag), intTagFor(argCountDst));
    store32(returnValueRegister, intPayloadFor(argCountDst));
}

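// op_new_func. A non-zero third operand marks the lazy variant: the function
// object is only created if the destination register is still empty, so
// re-executing the opcode reuses the function created the first time.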
void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTestPtr(NonZero, addressFor(dst));
#endif
    }
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
    if (currentInstruction[3].u.operand)
        lazyJump.link(this);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_buffer);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

} // namespace JSC

#endif // ENABLE(JIT)