/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "CopiedSpaceInlineMethods.h"
#include "Heap.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE64)

PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure* trampolines)
{
    // (2) The second function provides fast property access for string length.
    Label stringLengthBegin = align();

    // Check that regT0 contains a string.
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);

    Jump string_failureCases3 = branch32(LessThan, regT0, TrustedImm32(0));

    // regT0 contains a 64-bit value (is positive, is zero extended) so we don't need to sign extend here.
    emitFastArithIntToImmNoCheck(regT0, regT0);
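    // (The re-tag above ors in TagTypeNumber: e.g. a length of 5 becomes the
    // JSValue encoding 0xFFFF000000000005.)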

    ret();

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);

    JumpList callSlowCase;
    JumpList constructSlowCase;

    // VirtualCallLink Trampoline
    // regT0 holds callee; callFrame is moved and partially initialized.
    Label virtualCallLinkBegin = align();
    callSlowCase.append(emitJumpIfNotJSCell(regT0));
    callSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Also initialize ReturnPC for use by lazy linking and exceptions.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee; callFrame is moved and partially initialized.
    Label virtualConstructLinkBegin = align();
    constructSlowCase.append(emitJumpIfNotJSCell(regT0));
    constructSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);
    // Also initialize ReturnPC for use by lazy linking and exceptions.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualCall Trampoline
    // regT0 holds callee; regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    callSlowCase.append(emitJumpIfNotJSCell(regT0));
    callSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    Jump hasCodeBlock1 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
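    // A non-negative m_numParametersForCall means the executable already has code
    // for calls, so we can jump past the cti_op_call_jitCompile stub below.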
    preserveReturnAddressAfterCall(regT3);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCompileCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    hasCodeBlock1.link(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee; regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    constructSlowCase.append(emitJumpIfNotJSCell(regT0));
    constructSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    Jump hasCodeBlock2 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    hasCodeBlock2.link(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    callSlowCase.link(this);
    // Finish canonical initialization before JS function call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2, regT2);
    emitPutCellToCallFrameHeader(regT2, RegisterFile::ScopeChain);

    // Also initialize ReturnPC and CodeBlock, like a JS function would.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCallNotJSFunction = call();
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    constructSlowCase.link(this);
    // Finish canonical initialization before JS function call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2, regT2);
    emitPutCellToCallFrameHeader(regT2, RegisterFile::ScopeChain);

    // Also initialize ReturnPC and CodeBlock, like a JS function would.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callConstructNotJSFunction = call();
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);

    // All trampolines constructed! Copy the code, link up the calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, GLOBAL_THUNK_ID);

    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));
    patchBuffer.link(callCallNotJSFunction, FunctionPtr(cti_op_call_NotJSFunction));
    patchBuffer.link(callConstructNotJSFunction, FunctionPtr(cti_op_construct_NotJSConstruct));

    CodeRef finalCode = patchBuffer.finalizeCode();
    RefPtr<ExecutableMemoryHandle> executableMemory = finalCode.executableMemory();

    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);

    return executableMemory.release();
}

JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);

#if CPU(X86_64)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention: f(edi, esi, edx, ecx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::edi);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.
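    // (The call into this thunk pushed an 8-byte return address, so dropping another
    // 16 - sizeof(void*) == 8 bytes restores the 16-byte alignment the x86-64 ABI
    // requires of the stack pointer at the upcoming host-function call.)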

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
    loadPtr(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_executable)), X86Registers::r9);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(X86Registers::r9, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#else
#error "JIT not supported on this platform."
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif

    // Check for an exception
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Return.
    ret();

    // Handle an exception
    exceptionHandler.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);
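    // With ctiVMThrowTrampoline planted as the return address, the ret() below
    // "returns" into the throw trampoline, which unwinds to the exception handler.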

    ret();

    return nativeCallThunk;
}

JIT::CodeRef JIT::privateCompileCTINativeCall(JSGlobalData* globalData, NativeFunction)
{
    return CodeRef::createSelfManagedCodeRef(globalData->jitStubs->ctiNativeCall());
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (canBeOptimized()) {
        // Use the simpler approach, since the DFG assumes that the last result register
        // is always set to the destination on every operation.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        if (m_codeBlock->isConstantRegisterIndex(src)) {
            if (!getConstantOperand(src).isNumber())
                storePtr(TrustedImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            else
                storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
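            // Note: number constants go through ImmPtr rather than TrustedImmPtr,
            // which leaves the assembler free to blind the value-controlled immediate.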
            if (dst == m_lastResultBytecodeRegister)
                killLastResultRegister();
        } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
            // If either the src or dst is the cached register, go through
            // get/put registers to make sure we track this correctly.
            emitGetVirtualRegister(src, regT0);
            emitPutVirtualRegister(dst);
        } else {
            // Perform the copy via regT1; do not disturb any mapping in regT0.
            loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
            storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
        }
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    emitAllocateJSFinalObject(TrustedImmPtr(m_codeBlock->globalObject()->emptyObjectStructure()), regT0, regT1);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(baseVal, regT0);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // FIXME: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    comparePtr(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
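    // Booleans are encoded as ValueFalse (0x06) or ValueTrue (0x07): xor'ing with
    // ValueFalse maps them to 0 or 1, so any bit set outside the low bit means the
    // value was not a boolean.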
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    testPtr(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
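    // In the JSVALUE64 encoding every number has at least one TagTypeNumber bit set
    // (all sixteen high bits for integers, at least one for offset doubles), so a
    // non-zero intersection with tagTypeNumberRegister identifies a number.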
    testPtr(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
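    // Call the stub if either the activation or the arguments object has been
    // created; only when neither exists can the tear-off be skipped entirely.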
    Jump activationCreated = branchTestPtr(NonZero, addressFor(activation));
    Jump argumentsNotCreated = branchTestPtr(Zero, addressFor(arguments));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.call();
    argumentsNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branchTestPtr(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(dst))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst), regT2);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    emitOptimizationCheck(RetOptimizationCheck);

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    emitOptimizationCheck(RetOptimizationCheck);

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
{
    // Fast case
    void* globalObject = m_codeBlock->globalObject();
    unsigned currentIndex = m_globalResolveInfoIndex++;
    GlobalResolveInfo* resolveInfoAddress = &(m_codeBlock->globalResolveInfo(currentIndex));

    // Check Structure of global object
    move(TrustedImmPtr(globalObject), regT0);
    move(TrustedImmPtr(resolveInfoAddress), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset()))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT0);
    load32(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(TrustedImm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.callWithValueProfiling(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
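    // Concretely: ValueFalse == 0x06 and ValueTrue == 0x07, so false ^ ValueFalse == 0
    // and true ^ ValueFalse == 1; xor'ing with ValueTrue then yields 0x07 (true) or
    // 0x06 (false), i.e. the logical inverse of the input.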
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
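    // undefined (0x0A) and null (0x02) differ only in TagBitUndefined (0x08), so
    // masking that bit off folds both values onto the encoding of null.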
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue(ptr)))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}

void JIT::emit_op_resolve_with_this(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_this);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    storePtr(tagTypeNumberRegister, payloadFor(i));
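    // tagTypeNumberRegister holds bare TagTypeNumber, which is exactly the boxed
    // encoding of the integer 0, so this initializes i to 0.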
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));

    // Test base's prototype chain
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain)), regT3);
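    // StructureChain::m_vector is a zero-terminated array of Structure*; an
    // immediately zero entry means there are no prototypes left to validate.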
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
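    // (A bitwise comparison is not valid once doubles are involved: an integer 1 and
    // a double-encoded 1.0 box to different bit patterns, and -0.0 === 0.0 despite
    // their encodings differing.)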
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        comparePtr(Equal, regT1, regT0, regT0);
    else
        comparePtr(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, globalData) / sizeof(void*));
    loadPtr(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)), regT0);
    storePtr(TrustedImmPtr(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw_reference_error);
    if (!m_codeBlock->getConstant(currentInstruction[1].u.operand).isNumber())
        stubCall.addArgument(TrustedImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    else
        stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    test8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    comparePtr(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    test8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    comparePtr(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    emitPutVirtualRegister(dst);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    storePtr(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    addSlowCase(branchPtr(Equal, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
    emitPutVirtualRegister(result);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT2);
    emitJumpSlowCaseIfNotJSCell(regT2, currentInstruction[2].u.operand);
    loadPtr(Address(regT2, JSCell::structureOffset()), regT1);
    addSlowCase(emitJumpIfNotObject(regT1));

    // now we know that the prototype is an object, but we don't know if it's got an
    // inheritor ID

    loadPtr(Address(regT2, JSObject::offsetOfInheritorID()), regT2);
    addSlowCase(branchTestPtr(Zero, regT2));

    // now regT2 contains the inheritorID, which is the structure that the newly
    // allocated object will have.

    emitAllocateJSFinalObject(regT2, regT0, regT1);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand); // not a cell
    linkSlowCase(iter); // not an object
    linkSlowCase(iter); // doesn't have an inheritor ID
    linkSlowCase(iter); // allocation failed
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(currentInstruction[2].u.operand, regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

1329 // Slow cases
1330
1331 void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1332 {
1333 void* globalThis = m_codeBlock->globalObject()->globalScopeChain()->globalThis.get();
1334
1335 linkSlowCase(iter);
1336 Jump isNotUndefined = branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsUndefined())));
1337 move(TrustedImmPtr(globalThis), regT0);
1338 emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
1339 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));
1340
1341 isNotUndefined.link(this);
1342 linkSlowCase(iter);
1343 JITStubCall stubCall(this, cti_op_convert_this);
1344 stubCall.addArgument(regT0);
1345 stubCall.call(currentInstruction[1].u.operand);
1346 }
1347
1348 void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1349 {
1350 linkSlowCase(iter);
1351
1352 JITStubCall stubCall(this, cti_op_to_primitive);
1353 stubCall.addArgument(regT0);
1354 stubCall.call(currentInstruction[1].u.operand);
1355 }
1356
1357 void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1358 {
1359 linkSlowCase(iter);
1360 xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
1361 JITStubCall stubCall(this, cti_op_not);
1362 stubCall.addArgument(regT0);
1363 stubCall.call(currentInstruction[1].u.operand);
1364 }
1365
1366 void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1367 {
1368 linkSlowCase(iter);
1369 JITStubCall stubCall(this, cti_op_jtrue);
1370 stubCall.addArgument(regT0);
1371 stubCall.call();
1372 emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
1373 }
1374
1375 void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1376 {
1377 linkSlowCase(iter);
1378 JITStubCall stubCall(this, cti_op_jtrue);
1379 stubCall.addArgument(regT0);
1380 stubCall.call();
1381 emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
1382 }
1383
void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

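// op_neq has no stub of its own: call cti_op_eq, then flip the low bit of the
// result before tagging it as a boolean.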
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

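// compileOpStrictEq registers three slow cases (both operands being cells, or
// either operand being a double); link them all before falling back to the stub.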
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call();
}

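// Link the not-a-cell checks for value and proto plus the two remaining
// fast-path slow cases, then redo the whole instanceof test in the stub.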
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

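// The call slow cases all defer to compileOpCallSlowCase. Note that
// op_call_eval reads m_callLinkInfoIndex without advancing it: eval call
// sites are not linked, so they do not consume a CallLinkInfo.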
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

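// While no arguments object has been created, the length is just the call
// frame's ArgumentCount minus one (to drop |this|), retagged as an integer
// immediate. A materialized arguments object forces the slow path, which does
// a generic get_by_id of the property named by operand 3 instead.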
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}

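// Reading an argument by index stays on the fast path only while no arguments
// object exists, the index is an immediate integer, and it is within bounds.
// Arguments live at negative offsets from the call frame, so the (negated)
// index is used to address backwards from the |this| slot.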
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, counting |this|.
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    // Negate the index so we can address backwards from the |this| slot.
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    loadPtr(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    emitPutVirtualRegister(dst, regT0);
}

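// The three slow cases, in the order the fast path registered them: the
// arguments object already exists (skip straight to get_by_val on it); the
// index was not an immediate integer; the index was out of bounds. The latter
// two must create the arguments object before the generic lookup.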
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}

#endif // USE(JSVALUE64)

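// op_resolve_global_dynamic skips |skip| scope chain nodes, taking the slow
// path if any skipped node fails its structure check (i.e. is not a plain
// activation); if every check passes it behaves exactly like op_resolve_global.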
void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
{
    int skip = currentInstruction[5].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);

    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        // The top-level activation may not have been created yet; if it has
        // not, there is nothing to check and nothing to skip.
        Jump activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
    }
    emit_op_resolve_global(currentInstruction, true);
}

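// Each skipped scope chain node registered its own slow case; any of them
// resolves the identifier with a full cti_op_resolve. The final slow case is
// a miss in the global resolve cache.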
void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
    int skip = currentInstruction[5].u.operand;
    while (skip--)
        linkSlowCase(iter);
    JITStubCall resolveStubCall(this, cti_op_resolve);
    resolveStubCall.addArgument(TrustedImmPtr(ident));
    resolveStubCall.call(dst);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(TrustedImm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.callWithValueProfiling(dst);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

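// op_new_func inline-allocates the JSFunction. When operand 3 is set, the
// function may already have been created on an earlier pass through this
// code, so first test dst and skip the allocation if it is already initialized.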
void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTestPtr(NonZero, addressFor(dst));
#endif
    }

    FunctionExecutable* executable = m_codeBlock->functionDecl(currentInstruction[2].u.operand);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    emitAllocateJSFunction(executable, regT2, regT0, regT1);

    emitStoreCell(dst, regT0);

    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        unmap();
#else
        killLastResultRegister();
#endif
        lazyJump.link(this);
    }
}

void JIT::emitSlow_op_new_func(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    FunctionExecutable* executable = m_codeBlock->functionExpr(currentInstruction[2].u.operand);

    // We only inline the allocation of anonymous function expressions. To
    // allocate a named function expression inline we would also need to be
    // able to inline-allocate a JSStaticScopeObject.
    if (executable->name().isNull()) {
        emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
        emitAllocateJSFunction(executable, regT2, regT0, regT1);
        emitStoreCell(currentInstruction[1].u.operand, regT0);
        return;
    }

    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(executable));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_func_exp(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Only anonymous function expressions took the inline fast path above, so
    // only they can have a slow case to link.
    FunctionExecutable* executable = m_codeBlock->functionExpr(currentInstruction[2].u.operand);
    if (!executable->name().isNull())
        return;
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(executable));
    stubCall.call(currentInstruction[1].u.operand);
}

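// op_new_array inline-allocates the array unless its backing store would be
// oversize for CopiedSpace, in which case it is allocated by the stub.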
void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int length = currentInstruction[3].u.operand;
    if (CopiedSpace::isOversize(JSArray::storageSize(length))) {
        JITStubCall stubCall(this, cti_op_new_array);
        stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
        stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
        stubCall.call(currentInstruction[1].u.operand);
        return;
    }
    int dst = currentInstruction[1].u.operand;
    int values = currentInstruction[2].u.operand;

    emitAllocateJSArray(values, length, regT0, regT1, regT2);
    emitStoreCell(dst, regT0);
}

void JIT::emitSlow_op_new_array(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // If the allocation would be oversize, we already made the stub call in
    // emit_op_new_array, so there are no slow cases to link.
    int length = currentInstruction[3].u.operand;
    if (CopiedSpace::isOversize(JSArray::storageSize(length)))
        return;
    linkSlowCase(iter); // Not enough space in CopiedSpace for storage.
    linkSlowCase(iter); // Not enough space in MarkedSpace for cell.

    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_buffer);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

} // namespace JSC

#endif // ENABLE(JIT)