/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"

#if ENABLE(JIT)

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE32_64)
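
// A note on the value representation assumed throughout this file: with
// USE(JSVALUE32_64) each JSValue is a 64-bit tag/payload pair, so values are
// shuttled around as two 32-bit register halves - by convention the tag
// (JSValue::CellTag, Int32Tag, TrueTag, ...) travels in regT1/regT3 and the
// payload in regT0/regT2, matching the emitLoad/emitStore helpers used below.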

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
{
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag

    Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_stringLength)), regT2);

    Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
    move(regT2, regT0);
    move(Imm32(JSValue::Int32Tag), regT1);

    ret();
#endif

    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.

#if ENABLE(JIT_OPTIMIZE_CALL)
    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction2 = call();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    hasCodeBlock2.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck2 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    arityCheckOkay2.link(this);

    isNativeFunc2.link(this);

    compileOpCallInitializeCallFrame();

    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);
#endif // ENABLE(JIT_OPTIMIZE_CALL)

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction1 = call();
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck1 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    arityCheckOkay3.link(this);

    isNativeFunc3.link(this);

    compileOpCallInitializeCallFrame();
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
    jump(regT0);

#if CPU(X86) || CPU(ARM_TRADITIONAL)
    Label nativeCallThunk = align();
    preserveReturnAddressAfterCall(regT0);
    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address

    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

#if CPU(X86)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    /* We have two structs that we use to describe the stackframe we set up for our
     * call to native code. NativeCallFrameStructure describes how we set up the stack
     * in advance of the call. NativeFunctionCalleeSignature describes the callframe
     * as the native code expects it. We do this as we are using the fastcall calling
     * convention which results in the callee popping its arguments off the stack, but
     * not the rest of the callframe so we need a nice way to ensure we increment the
     * stack pointer by the right amount after the call.
     */
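
    // In other words: only the NativeFunctionCalleeSignature prefix of the
    // frame is popped by the fastcall callee; the rest (args, result slot)
    // stays put, which is why the epilogue below adds back
    // NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature).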

#if COMPILER(MSVC) || OS(LINUX)
#if COMPILER(MSVC)
#pragma pack(push)
#pragma pack(4)
#endif // COMPILER(MSVC)
    struct NativeCallFrameStructure {
        // CallFrame* callFrame; // passed in EDX
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
        JSValue result;
    };
    struct NativeFunctionCalleeSignature {
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
    };
#if COMPILER(MSVC)
#pragma pack(pop)
#endif // COMPILER(MSVC)
#else
    struct NativeCallFrameStructure {
        // CallFrame* callFrame; // passed in ECX
        // JSObject* callee; // passed in EDX
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
    };
    struct NativeFunctionCalleeSignature {
        JSValue thisValue;
        ArgList* argPointer;
    };
#endif

    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
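    // "(size + 15) & ~15" rounds the frame size up to the next multiple of
    // 16, keeping the system stack 16-byte aligned across the native call.
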
    // Allocate system stack frame
    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));

    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT3);
    storePtr(regT2, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    storePtr(regT3, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

#if COMPILER(MSVC) || OS(LINUX)
    // The JSValue result is returned indirectly; pass the address of the result slot as the hidden argument in ECX
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);

    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
    storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));

    // Plant callframe
    move(callFrameRegister, X86Registers::edx);

    call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));

    // JSValue is a non-POD type, so eax points to it
    emitLoad(0, regT1, regT0, X86Registers::eax);
#else
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx); // callee
    move(callFrameRegister, X86Registers::ecx); // callFrame
    call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
#endif

    // We've put a few temporaries on the stack in addition to the actual arguments
    // so pull them off now
    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);

#elif CPU(ARM_TRADITIONAL)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    // Allocate stack space for our arglist
    COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0 && sizeof(JSValue) == 8 && sizeof(Register) == 8, ArgList_should_be_8byte_aligned);
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    move(callFrameRegister, regT1);
    sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);

    // Push pointer to arguments
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // Argument passing method:
    // r0 - points to return value
    // r1 - callFrame
    // r2 - callee
    // stack: this(JSValue) and a pointer to ArgList

    move(stackPointerRegister, regT3);
    subPtr(Imm32(8), stackPointerRegister);
    move(stackPointerRegister, regT0);
    subPtr(Imm32(8 + 4 + 4 /* padding */), stackPointerRegister);
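
    // A sketch of the resulting layout, derived from the loads below:
    // sp+0..7 hold 'this', sp+8 holds the ArgList pointer (stored next as
    // arg4), sp+12 is padding, and regT0 points at the 8-byte return-value
    // slot at sp+16, which is read back after the call.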

    // Set up arg4:
    storePtr(regT3, Address(stackPointerRegister, 8));

    // Set up arg3:
    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    load32(Address(regT1, -(int32_t)sizeof(void*) * 2), regT3);
    storePtr(regT3, Address(stackPointerRegister, 0));
    load32(Address(regT1, -(int32_t)sizeof(void*)), regT3);
    storePtr(regT3, Address(stackPointerRegister, 4));

    // Set up arg2:
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);

    // Set up arg1:
    move(callFrameRegister, regT1);

    call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));

    // Load return value
    load32(Address(stackPointerRegister, 16), regT0);
    load32(Address(stackPointerRegister, 20), regT1);

    addPtr(Imm32(sizeof(ArgList) + 16 + 8), stackPointerRegister);
#endif

    // Check for an exception
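    // globalData->exception holds a JSValue; a tag other than EmptyValueTag
    // means the native call raised an exception.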
    move(ImmPtr(&globalData->exception), regT2);
    Jump sawException = branch32(NotEqual, tagFor(0, regT2), Imm32(JSValue::EmptyValueTag));

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT3);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // Handle an exception
    sawException.link(this);
    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    restoreReturnAddressBeforeReturn(regT2);
    ret();

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
#endif

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
    *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
#else
    UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
#else
    UNUSED_PARAM(ctiVirtualCallLink);
#endif
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
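        // op1 is the constant here, so "op1 <= op2" is emitted with the
        // operands swapped: branch if op2 >= op1.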
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that value, baseVal, and proto are cells.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(baseVal);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(Imm32(JSValue::TrueTag), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);
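
    // (A null prototype is stored with a zero payload in this encoding, so
    // testing the payload for zero is enough to detect the end of the
    // prototype chain.)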

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(Imm32(JSValue::FalseTag), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[2].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[3].u.operand;

    loadPtr(&globalObject->d()->registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[1].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    loadPtr(&globalObject->d()->registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Verify structure.
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))));

    // Load property.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
    load32(offsetAddr, regT3);
    load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_resolve_global), dst, regT1, regT0);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

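    // The tag encoding guarantees TrueTag == (FalseTag | 1) (emitStoreBool
    // relies on this too), so xoring the tag with FalseTag yields 0 for
    // false and 1 for true; any other bit set means the operand was not a
    // boolean and we take the slow case. A second xor with TrueTag then
    // materializes the inverted boolean's tag directly.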
    xor32(Imm32(JSValue::FalseTag), regT0);
    addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
    xor32(Imm32(JSValue::TrueTag), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isTrue = branch32(Equal, regT1, Imm32(JSValue::TrueTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::FalseTag)), target);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isTrue2 = branch32(NotEqual, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
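
        // Tags sit at the top of the 32-bit range: a tag word at or below
        // LowestTag is really the high half of a double, so fall through to
        // the floating-point comparison; any other tag takes the slow case.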

        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleEqualOrUnordered, fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isTrue.link(this);
    isTrue2.link(this);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleNotEqual, fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isFalse.link(this);
    isFalse2.link(this);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(NonZero, regT1), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(Zero, regT1), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(Equal, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(NotEqual, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(Imm32(0x1), regT0);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));
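
    // (The tag AND is a conservative filter: any pair that might involve a
    // double, or two cells/Int32s, is routed to the slow cases above;
    // whatever falls through can be decided by comparing the tags alone.)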

    if (type == OpStrictEq)
        set8(Equal, regT0, regT1, regT0);
    else
        set8(NotEqual, regT0, regT1, regT0);

    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
    set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister()) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
        isNotObject.append(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(Imm32(0), addressFor(i));
    store32(regT3, addressFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, Imm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, Imm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(addressFor(i), regT0);
    Jump end = branch32(Equal, regT0, addressFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(Imm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(Imm32(1), regT0);
    store32(regT0, addressFor(i));

    // Verify that i is valid:
    loadPtr(addressFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), Imm32(JSValue::NullTag)));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(Imm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
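
    // regT3 walks the cached StructureChain vector, comparing each entry
    // against the structure of the corresponding prototype; a zero entry
    // terminates the vector, so falling out of this loop means the cached
    // key is still valid.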

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::EmptyValueTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;

    // This opcode only executes after a return from cti_op_throw.

    // cti_op_throw may have taken us to a call frame further up the stack; reload
    // the call frame pointer to adjust.
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Now store the exception returned by cti_op_throw.
    emitStore(exception, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned type = currentInstruction[2].u.operand;
    unsigned message = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(type));
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(dst);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    emit_op_enter(currentInstruction);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_create_arguments(Instruction*)
{
    Jump argsCreated = branch32(NotEqual, tagFor(RegisterFile::ArgumentsRegister, callFrameRegister), Imm32(JSValue::EmptyValueTag));

    // If we get here the arguments pointer is a null cell - i.e. arguments need lazy creation.
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    argsCreated.link(this);
}

void JIT::emit_op_init_arguments(Instruction*)
{
    emitStore(RegisterFile::ArgumentsRegister, JSValue(), callFrameRegister);
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));

    map(m_bytecodeIndex + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

#else // USE(JSVALUE32_64)

#define RECORD_JUMP_TARGET(targetOffset) \
    do { m_labels[m_bytecodeIndex + (targetOffset)].used(); } while (false)
1497
1498 void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
1499 {
1500 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1501 // (2) The second function provides fast property access for string length
1502 Label stringLengthBegin = align();
1503
1504 // Check that regT0 (eax) is a string
1505 Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
1506 Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));
1507
1508 // Checks out okay! - get the length from the Ustring.
1509 load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_stringLength)), regT0);
1510
1511 Jump string_failureCases3 = branch32(Above, regT0, Imm32(JSImmediate::maxImmediateInt));
1512
1513 // regT0 contains a 64-bit value (positive and zero-extended), so we don't need a sign extend here.
1514 emitFastArithIntToImmNoCheck(regT0, regT0);
1515
1516 ret();
1517 #endif
1518
1519 // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
1520 COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);
1521
1522 // VirtualCallLink Trampoline
1523 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
1524 Label virtualCallLinkBegin = align();
1525 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1526
1527 Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
1528
1529 Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
1530 preserveReturnAddressAfterCall(regT3);
1531 restoreArgumentReference();
1532 Call callJSFunction2 = call();
1533 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1534 emitGetJITStubArg(2, regT1); // argCount
1535 restoreReturnAddressBeforeReturn(regT3);
1536 hasCodeBlock2.link(this);
1537
1538 // Check argCount matches callee arity.
1539 Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
1540 preserveReturnAddressAfterCall(regT3);
1541 emitPutJITStubArg(regT3, 1); // return address
1542 restoreArgumentReference();
1543 Call callArityCheck2 = call();
1544 move(regT1, callFrameRegister);
1545 emitGetJITStubArg(2, regT1); // argCount
1546 restoreReturnAddressBeforeReturn(regT3);
1547 arityCheckOkay2.link(this);
1548
1549 isNativeFunc2.link(this);
1550
1551 compileOpCallInitializeCallFrame();
1552 preserveReturnAddressAfterCall(regT3);
1553 emitPutJITStubArg(regT3, 1); // return address
1554 restoreArgumentReference();
1555 Call callLazyLinkCall = call();
1556 restoreReturnAddressBeforeReturn(regT3);
1557 jump(regT0);
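
// In outline, the VirtualCallLink trampoline above (a sketch of the flow):
// make sure the callee has JIT code (compiling and re-checking arity via
// the stubs if necessary), then call out to the lazy-link stub, which is
// expected to patch the original call site so future calls jump straight
// to the callee; regT0 then holds the entry point for this first call.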
1558
1559 // VirtualCall Trampoline
1560 // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
1561 Label virtualCallBegin = align();
1562 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1563
1564 Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
1565
1566 Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
1567 preserveReturnAddressAfterCall(regT3);
1568 restoreArgumentReference();
1569 Call callJSFunction1 = call();
1570 emitGetJITStubArg(2, regT1); // argCount
1571 restoreReturnAddressBeforeReturn(regT3);
1572 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1573 hasCodeBlock3.link(this);
1574
1575 // Check argCount matches callee arity.
1576 Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
1577 preserveReturnAddressAfterCall(regT3);
1578 emitPutJITStubArg(regT3, 1); // return address
1579 restoreArgumentReference();
1580 Call callArityCheck1 = call();
1581 move(regT1, callFrameRegister);
1582 emitGetJITStubArg(2, regT1); // argCount
1583 restoreReturnAddressBeforeReturn(regT3);
1584 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1585 arityCheckOkay3.link(this);
1586
1587 isNativeFunc3.link(this);
1588
1589 compileOpCallInitializeCallFrame();
1590 loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
1591 jump(regT0);
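
// The VirtualCall trampoline is the unpatched variant of the above: it
// performs the same compile/arity checks but never rewrites the call site,
// instead loading FunctionExecutable::m_jitCode and jumping to it on every
// call.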
1592
1593 Label nativeCallThunk = align();
1594 preserveReturnAddressAfterCall(regT0);
1595 emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address
1596
1597 // Load caller frame's scope chain into this callframe so that whatever we call can
1598 // get to its global data.
1599 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
1600 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
1601 emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
1602
1603
1604 #if CPU(X86_64)
1605 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, X86Registers::ecx);
1606
1607 // Allocate stack space for our arglist
1608 subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
1609 COMPILE_ASSERT((sizeof(ArgList) & 0xf) == 0, ArgList_should_be_16byte_aligned);
1610
1611 // Set up arguments
1612 subPtr(Imm32(1), X86Registers::ecx); // Don't include 'this' in argcount
1613
1614 // Push argcount
1615 storePtr(X86Registers::ecx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));
1616
1617 // Calculate the start of the callframe header, and store in edx
1618 addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), callFrameRegister, X86Registers::edx);
1619
1620 // Calculate start of arguments as callframe header - sizeof(Register) * argcount (ecx)
1621 mul32(Imm32(sizeof(Register)), X86Registers::ecx, X86Registers::ecx);
1622 subPtr(X86Registers::ecx, X86Registers::edx);
1623
1624 // push pointer to arguments
1625 storePtr(X86Registers::edx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));
1626
1627 // The ArgList is passed by reference; it sits at the top of the stack, so its address is simply stackPointerRegister
1628 move(stackPointerRegister, X86Registers::ecx);
1629
1630 // edx currently points to the first argument, edx-sizeof(Register) points to 'this'
1631 loadPtr(Address(X86Registers::edx, -(int32_t)sizeof(Register)), X86Registers::edx);
1632
1633 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
1634
1635 move(callFrameRegister, X86Registers::edi);
1636
1637 call(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_data)));
1638
1639 addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
1640 #elif CPU(X86)
1641 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1642
1643 /* We have two structs that we use to describe the stackframe we set up for our
1644 * call to native code. NativeCallFrameStructure describes how we set up the stack
1645 * in advance of the call. NativeFunctionCalleeSignature describes the callframe
1646 * as the native code expects it. We do this because we are using the fastcall
1647 * calling convention, which results in the callee popping its arguments off the
1648 * stack - but not the rest of the callframe - so we need a reliable way to
1649 * increment the stack pointer by the right amount after the call.
1650 */
1651 #if COMPILER(MSVC) || OS(LINUX)
1652 struct NativeCallFrameStructure {
1653 // CallFrame* callFrame; // passed in EDX
1654 JSObject* callee;
1655 JSValue thisValue;
1656 ArgList* argPointer;
1657 ArgList args;
1658 JSValue result;
1659 };
1660 struct NativeFunctionCalleeSignature {
1661 JSObject* callee;
1662 JSValue thisValue;
1663 ArgList* argPointer;
1664 };
1665 #else
1666 struct NativeCallFrameStructure {
1667 // CallFrame* callFrame; // passed in ECX
1668 // JSObject* callee; // passed in EDX
1669 JSValue thisValue;
1670 ArgList* argPointer;
1671 ArgList args;
1672 };
1673 struct NativeFunctionCalleeSignature {
1674 JSValue thisValue;
1675 ArgList* argPointer;
1676 };
1677 #endif
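
// A rough picture of the system stack just before the call, for the
// MSVC/Linux layout above (lower addresses on top; this diagram is a
// sketch - the exact offsets follow from the struct definitions):
//
//     esp -> callee       \
//            thisValue     | NativeFunctionCalleeSignature: popped by the
//            argPointer   /  fastcall callee when it returns
//            args          \
//            result         | temporaries we pop ourselves after the call
//            (padding)     /
//
// This is why the post-call stack adjustment below is
// NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature).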
1678 const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
1679 // Allocate system stack frame
1680 subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);
1681
1682 // Set up arguments
1683 subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
1684
1685 // push argcount
1686 storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));
1687
1688 // Calculate the start of the callframe header, and store in regT1
1689 addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);
1690
1691 // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
1692 mul32(Imm32(sizeof(Register)), regT0, regT0);
1693 subPtr(regT0, regT1);
1694 storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));
1695
1696 // The ArgList is passed by reference; compute its address (stackPointerRegister + offsetof(args)) and store it as argPointer
1697 addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
1698 storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));
1699
1700 // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
1701 loadPtr(Address(regT1, -(int)sizeof(Register)), regT1);
1702 storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue)));
1703
1704 #if COMPILER(MSVC) || OS(LINUX)
1705 // JSValue is returned through a hidden pointer to the result slot; compute that address into ecx
1706 addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);
1707
1708 // Plant callee
1709 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
1710 storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));
1711
1712 // Plant callframe
1713 move(callFrameRegister, X86Registers::edx);
1714
1715 call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));
1716
1717 // JSValue is a non-POD type
1718 loadPtr(Address(X86Registers::eax), X86Registers::eax);
1719 #else
1720 // Plant callee
1721 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx);
1722
1723 // Plant callframe
1724 move(callFrameRegister, X86Registers::ecx);
1725 call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
1726 #endif
1727
1728 // We've put a few temporaries on the stack in addition to the actual arguments
1729 // so pull them off now
1730 addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);
1731
1732 #elif CPU(ARM)
1733 emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1734
1735 // Allocate stack space for our arglist
1736 COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0, ArgList_should_be_8byte_aligned);
1737 subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
1738
1739 // Set up arguments
1740 subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
1741
1742 // Push argcount
1743 storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));
1744
1745 // Calculate the start of the callframe header, and store in regT1
1746 move(callFrameRegister, regT1);
1747 sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);
1748
1749 // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
1750 mul32(Imm32(sizeof(Register)), regT0, regT0);
1751 subPtr(regT0, regT1);
1752
1753 // push pointer to arguments
1754 storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));
1755
1756 // Setup arg3: regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
1757 loadPtr(Address(regT1, -(int32_t)sizeof(Register)), regT2);
1758
1759 // Setup arg2:
1760 emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
1761
1762 // Setup arg1:
1763 move(callFrameRegister, regT0);
1764
1765 // Setup arg4: This is a plain hack
1766 move(stackPointerRegister, ARMRegisters::r3);
1767
1768 call(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_data)));
1769
1770 addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
1771
1772 #elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
1773 #error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
1774 #else
1775 breakpoint();
1776 #endif
1777
1778 // Check for an exception
1779 loadPtr(&(globalData->exception), regT2);
1780 Jump exceptionHandler = branchTestPtr(NonZero, regT2);
1781
1782 // Grab the return address.
1783 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
1784
1785 // Restore our caller's call frame pointer ("r").
1786 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
1787
1788 // Return.
1789 restoreReturnAddressBeforeReturn(regT1);
1790 ret();
1791
1792 // Handle an exception
1793 exceptionHandler.link(this);
1794 // Grab the return address.
1795 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
1796 move(ImmPtr(&globalData->exceptionLocation), regT2);
1797 storePtr(regT1, regT2);
1798 move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT2);
1799 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
1800 poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
1801 restoreReturnAddressBeforeReturn(regT2);
1802 ret();
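
// Exception path sketch: rather than returning to the JIT caller, we stash
// the real return address in globalData->exceptionLocation and plant the
// address of ctiVMThrowTrampoline where the return address is restored
// from, so the ret above actually enters the throw machinery.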
1803
1804
1805 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1806 Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
1807 Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
1808 Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
1809 #endif
1810
1811 // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
1812 LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));
1813
1814 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1815 patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
1816 patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
1817 patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
1818 #endif
1819 patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
1820 patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
1821 #if ENABLE(JIT_OPTIMIZE_CALL)
1822 patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
1823 patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
1824 patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
1825 #endif
1826
1827 CodeRef finalCode = patchBuffer.finalizeCode();
1828 *executablePool = finalCode.m_executablePool;
1829
1830 *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
1831 *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
1832 *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
1833 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1834 *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
1835 #else
1836 UNUSED_PARAM(ctiStringLengthTrampoline);
1837 #endif
1838 }
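
// Trampoline lifecycle, in outline: the code above is assembled into a
// scratch buffer, LinkBuffer copies it into executable memory, each
// out-of-line Call is bound to its cti_* C function, and the entry labels
// are exported as CodePtrs for the rest of the VM to call through.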
1839
1840 void JIT::emit_op_mov(Instruction* currentInstruction)
1841 {
1842 int dst = currentInstruction[1].u.operand;
1843 int src = currentInstruction[2].u.operand;
1844
1845 if (m_codeBlock->isConstantRegisterIndex(src)) {
1846 storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
1847 if (dst == m_lastResultBytecodeRegister)
1848 killLastResultRegister();
1849 } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
1850 // If either the src or dst is the cached register, go through the
1851 // get/put virtual register helpers to make sure we track this correctly.
1852 emitGetVirtualRegister(src, regT0);
1853 emitPutVirtualRegister(dst);
1854 } else {
1855 // Perform the copy via regT1; do not disturb any mapping in regT0.
1856 loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
1857 storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
1858 }
1859 }
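
// Note on the three cases above: regT0 doubles as a one-entry cache of the
// last result (m_lastResultBytecodeRegister). A constant store must kill a
// stale cache entry for dst, a move touching the cached register must go
// through the get/put helpers so the cache stays coherent, and every other
// move is routed through regT1 so the cached value in regT0 survives.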
1860
1861 void JIT::emit_op_end(Instruction* currentInstruction)
1862 {
1863 if (m_codeBlock->needsFullScopeChain())
1864 JITStubCall(this, cti_op_end).call();
1865 ASSERT(returnValueRegister != callFrameRegister);
1866 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
1867 restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
1868 ret();
1869 }
1870
1871 void JIT::emit_op_jmp(Instruction* currentInstruction)
1872 {
1873 unsigned target = currentInstruction[1].u.operand;
1874 addJump(jump(), target);
1875 RECORD_JUMP_TARGET(target);
1876 }
1877
1878 void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
1879 {
1880 emitTimeoutCheck();
1881
1882 unsigned op1 = currentInstruction[1].u.operand;
1883 unsigned op2 = currentInstruction[2].u.operand;
1884 unsigned target = currentInstruction[3].u.operand;
1885 if (isOperandConstantImmediateInt(op2)) {
1886 emitGetVirtualRegister(op1, regT0);
1887 emitJumpSlowCaseIfNotImmediateInteger(regT0);
1888 #if USE(JSVALUE64)
1889 int32_t op2imm = getConstantOperandImmediateInt(op2);
1890 #else
1891 int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
1892 #endif
1893 addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target);
1894 } else {
1895 emitGetVirtualRegisters(op1, regT0, op2, regT1);
1896 emitJumpSlowCaseIfNotImmediateInteger(regT0);
1897 emitJumpSlowCaseIfNotImmediateInteger(regT1);
1898 addJump(branch32(LessThanOrEqual, regT0, regT1), target);
1899 }
1900 }
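
// Why the constant-operand compare above is sound (a sketch): on JSVALUE64
// the integer tag occupies the upper bits, so branch32 on the low 32 bits
// compares raw int32 payloads; on JSVALUE32 the constant keeps its tag bits
// (JSImmediate::rawValue), and since both operands then carry identical tag
// bits, the tagged signed comparison preserves the payload ordering.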
1901
1902 void JIT::emit_op_new_object(Instruction* currentInstruction)
1903 {
1904 JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
1905 }
1906
1907 void JIT::emit_op_instanceof(Instruction* currentInstruction)
1908 {
1909 unsigned dst = currentInstruction[1].u.operand;
1910 unsigned value = currentInstruction[2].u.operand;
1911 unsigned baseVal = currentInstruction[3].u.operand;
1912 unsigned proto = currentInstruction[4].u.operand;
1913
1914 // Load the operands (baseVal, proto, and value respectively) into registers.
1915 // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
1916 emitGetVirtualRegister(value, regT2);
1917 emitGetVirtualRegister(baseVal, regT0);
1918 emitGetVirtualRegister(proto, regT1);
1919
1920 // Check that baseVal & proto are cells.
1921 emitJumpSlowCaseIfNotJSCell(regT2, value);
1922 emitJumpSlowCaseIfNotJSCell(regT0, baseVal);
1923 emitJumpSlowCaseIfNotJSCell(regT1, proto);
1924
1925 // Check that baseVal 'ImplementsDefaultHasInstance'.
1926 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
1927 addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));
1928
1929 // Optimistically load the result true, and start looping.
1930 // Initially, regT1 still contains proto and regT2 still contains value.
1931 // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
1932 move(ImmPtr(JSValue::encode(jsBoolean(true))), regT0);
1933 Label loop(this);
1934
1935 // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
1936 // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
1937 loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
1938 loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
1939 Jump isInstance = branchPtr(Equal, regT2, regT1);
1940 emitJumpIfJSCell(regT2).linkTo(loop, this);
1941
1942 // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
1943 move(ImmPtr(JSValue::encode(jsBoolean(false))), regT0);
1944
1945 // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
1946 isInstance.link(this);
1947 emitPutVirtualRegister(dst);
1948 }
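
// The inline loop above corresponds to this sketch (the helper names here
// are illustrative, not the real API):
//
//     bool result = true;
//     JSValue v = value;
//     do {
//         v = v.asCell()->structure()->prototype();
//         if (v == proto)
//             goto done;           // isInstance: result stays true
//     } while (v.isCell());
//     result = false;              // walked off the end of the chain
// done:
//     dst = jsBoolean(result);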
1949
1950 void JIT::emit_op_new_func(Instruction* currentInstruction)
1951 {
1952 JITStubCall stubCall(this, cti_op_new_func);
1953 stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
1954 stubCall.call(currentInstruction[1].u.operand);
1955 }
1956
1957 void JIT::emit_op_call(Instruction* currentInstruction)
1958 {
1959 compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
1960 }
1961
1962 void JIT::emit_op_call_eval(Instruction* currentInstruction)
1963 {
1964 compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
1965 }
1966
1967 void JIT::emit_op_load_varargs(Instruction* currentInstruction)
1968 {
1969 int argCountDst = currentInstruction[1].u.operand;
1970 int argsOffset = currentInstruction[2].u.operand;
1971
1972 JITStubCall stubCall(this, cti_op_load_varargs);
1973 stubCall.addArgument(Imm32(argsOffset));
1974 stubCall.call();
1975 // Stores a naked int32 in the register file.
1976 store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
1977 }
1978
1979 void JIT::emit_op_call_varargs(Instruction* currentInstruction)
1980 {
1981 compileOpCallVarargs(currentInstruction);
1982 }
1983
1984 void JIT::emit_op_construct(Instruction* currentInstruction)
1985 {
1986 compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
1987 }
1988
1989 void JIT::emit_op_get_global_var(Instruction* currentInstruction)
1990 {
1991 JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
1992 move(ImmPtr(globalObject), regT0);
1993 emitGetVariableObjectRegister(regT0, currentInstruction[3].u.operand, regT0);
1994 emitPutVirtualRegister(currentInstruction[1].u.operand);
1995 }
1996
1997 void JIT::emit_op_put_global_var(Instruction* currentInstruction)
1998 {
1999 emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
2000 JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
2001 move(ImmPtr(globalObject), regT0);
2002 emitPutVariableObjectRegister(regT1, regT0, currentInstruction[2].u.operand);
2003 }
2004
2005 void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
2006 {
2007 int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();
2008
2009 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
2010 while (skip--)
2011 loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
2012
2013 loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
2014 emitGetVariableObjectRegister(regT0, currentInstruction[2].u.operand, regT0);
2015 emitPutVirtualRegister(currentInstruction[1].u.operand);
2016 }
2017
2018 void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
2019 {
2020 int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
2021
2022 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
2023 emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
2024 while (skip--)
2025 loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
2026
2027 loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
2028 emitPutVariableObjectRegister(regT0, regT1, currentInstruction[1].u.operand);
2029 }
2030
2031 void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
2032 {
2033 JITStubCall stubCall(this, cti_op_tear_off_activation);
2034 stubCall.addArgument(currentInstruction[1].u.operand, regT2);
2035 stubCall.call();
2036 }
2037
2038 void JIT::emit_op_tear_off_arguments(Instruction*)
2039 {
2040 JITStubCall(this, cti_op_tear_off_arguments).call();
2041 }
2042
2043 void JIT::emit_op_ret(Instruction* currentInstruction)
2044 {
2045 // We could JIT generate the deref, only calling out to C when the refcount hits zero.
2046 if (m_codeBlock->needsFullScopeChain())
2047 JITStubCall(this, cti_op_ret_scopeChain).call();
2048
2049 ASSERT(callFrameRegister != regT1);
2050 ASSERT(regT1 != returnValueRegister);
2051 ASSERT(returnValueRegister != callFrameRegister);
2052
2053 // Return the result in %eax.
2054 emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
2055
2056 // Grab the return address.
2057 emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
2058
2059 // Restore our caller's call frame pointer ("r").
2060 emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
2061
2062 // Return.
2063 restoreReturnAddressBeforeReturn(regT1);
2064 ret();
2065 }
2066
2067 void JIT::emit_op_new_array(Instruction* currentInstruction)
2068 {
2069 JITStubCall stubCall(this, cti_op_new_array);
2070 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
2071 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
2072 stubCall.call(currentInstruction[1].u.operand);
2073 }
2074
2075 void JIT::emit_op_resolve(Instruction* currentInstruction)
2076 {
2077 JITStubCall stubCall(this, cti_op_resolve);
2078 stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
2079 stubCall.call(currentInstruction[1].u.operand);
2080 }
2081
2082 void JIT::emit_op_construct_verify(Instruction* currentInstruction)
2083 {
2084 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
2085
2086 emitJumpSlowCaseIfNotJSCell(regT0);
2087 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2088 addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
2089
2090 }
2091
2092 void JIT::emit_op_to_primitive(Instruction* currentInstruction)
2093 {
2094 int dst = currentInstruction[1].u.operand;
2095 int src = currentInstruction[2].u.operand;
2096
2097 emitGetVirtualRegister(src, regT0);
2098
2099 Jump isImm = emitJumpIfNotJSCell(regT0);
2100 addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
2101 isImm.link(this);
2102
2103 if (dst != src)
2104 emitPutVirtualRegister(dst);
2105
2106 }
2107
2108 void JIT::emit_op_strcat(Instruction* currentInstruction)
2109 {
2110 JITStubCall stubCall(this, cti_op_strcat);
2111 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
2112 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
2113 stubCall.call(currentInstruction[1].u.operand);
2114 }
2115
2116 void JIT::emit_op_resolve_base(Instruction* currentInstruction)
2117 {
2118 JITStubCall stubCall(this, cti_op_resolve_base);
2119 stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
2120 stubCall.call(currentInstruction[1].u.operand);
2121 }
2122
2123 void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
2124 {
2125 JITStubCall stubCall(this, cti_op_resolve_skip);
2126 stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
2127 stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
2128 stubCall.call(currentInstruction[1].u.operand);
2129 }
2130
2131 void JIT::emit_op_resolve_global(Instruction* currentInstruction)
2132 {
2133 // Fast case
2134 void* globalObject = currentInstruction[2].u.jsCell;
2135 Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);
2136
2137 unsigned currentIndex = m_globalResolveInfoIndex++;
2138 void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
2139 void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
2140
2141 // Check Structure of global object
2142 move(ImmPtr(globalObject), regT0);
2143 loadPtr(structureAddress, regT1);
2144 Jump noMatch = branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))); // Structures don't match
2145
2146 // Load cached property
2147 // Assume that the global object always uses external storage.
2148 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT0);
2149 load32(offsetAddr, regT1);
2150 loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
2151 emitPutVirtualRegister(currentInstruction[1].u.operand);
2152 Jump end = jump();
2153
2154 // Slow case
2155 noMatch.link(this);
2156 JITStubCall stubCall(this, cti_op_resolve_global);
2157 stubCall.addArgument(ImmPtr(globalObject));
2158 stubCall.addArgument(ImmPtr(ident));
2159 stubCall.addArgument(Imm32(currentIndex));
2160 stubCall.call(currentInstruction[1].u.operand);
2161 end.link(this);
2162 }
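
// The fast path above is a monomorphic inline cache, roughly (a sketch;
// the cached structure and offset live in the CodeBlock's
// GlobalResolveInfo and are presumably refreshed by the stub on a miss):
//
//     if (globalObject->structure == cachedStructure)
//         dst = globalObject->externalStorage[cachedOffset];
//     else
//         dst = cti_op_resolve_global(globalObject, ident, index);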
2163
2164 void JIT::emit_op_not(Instruction* currentInstruction)
2165 {
2166 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
2167 xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
2168 addSlowCase(branchTestPtr(NonZero, regT0, Imm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
2169 xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), regT0);
2170 emitPutVirtualRegister(currentInstruction[1].u.operand);
2171 }
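
// How the bit trick above works (sketch of the immediate encoding):
// booleans are FullTagTypeBool plus an optional ExtendedPayloadBitBoolValue
// payload bit. XORing with FullTagTypeBool strips the tag, so any remaining
// bit outside the payload bit means the operand was not a boolean (slow
// case); XORing the tag and payload bit back in re-tags the value with its
// payload flipped - a logical not.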
2172
2173 void JIT::emit_op_jfalse(Instruction* currentInstruction)
2174 {
2175 unsigned target = currentInstruction[2].u.operand;
2176 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
2177
2178 addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0)))), target);
2179 Jump isNonZero = emitJumpIfImmediateInteger(regT0);
2180
2181 addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))), target);
2182 addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))));
2183
2184 isNonZero.link(this);
2185 RECORD_JUMP_TARGET(target);
2186 }
2187 void JIT::emit_op_jeq_null(Instruction* currentInstruction)
2188 {
2189 unsigned src = currentInstruction[1].u.operand;
2190 unsigned target = currentInstruction[2].u.operand;
2191
2192 emitGetVirtualRegister(src, regT0);
2193 Jump isImmediate = emitJumpIfNotJSCell(regT0);
2194
2195 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
2196 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2197 addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
2198 Jump wasNotImmediate = jump();
2199
2200 // Now handle the immediate cases - undefined & null
2201 isImmediate.link(this);
2202 andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
2203 addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNull()))), target);
2204
2205 wasNotImmediate.link(this);
2206 RECORD_JUMP_TARGET(target);
2207 }
2208 void JIT::emit_op_jneq_null(Instruction* currentInstruction)
2209 {
2210 unsigned src = currentInstruction[1].u.operand;
2211 unsigned target = currentInstruction[2].u.operand;
2212
2213 emitGetVirtualRegister(src, regT0);
2214 Jump isImmediate = emitJumpIfNotJSCell(regT0);
2215
2216 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
2217 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2218 addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
2219 Jump wasNotImmediate = jump();
2220
2221 // Now handle the immediate cases - undefined & null
2222 isImmediate.link(this);
2223 andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
2224 addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsNull()))), target);
2225
2226 wasNotImmediate.link(this);
2227 RECORD_JUMP_TARGET(target);
2228 }
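
// In both jeq_null and jneq_null the immediate case masks off
// ExtendedTagBitUndefined so that undefined and null collapse onto the
// same bit pattern (jsNull()); a single pointer compare then implements
// the loose "== null" semantics for immediates.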
2229
2230 void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
2231 {
2232 unsigned src = currentInstruction[1].u.operand;
2233 JSCell* ptr = currentInstruction[2].u.jsCell;
2234 unsigned target = currentInstruction[3].u.operand;
2235
2236 emitGetVirtualRegister(src, regT0);
2237 addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue(ptr)))), target);
2238
2239 RECORD_JUMP_TARGET(target);
2240 }
2241
2242 void JIT::emit_op_jsr(Instruction* currentInstruction)
2243 {
2244 int retAddrDst = currentInstruction[1].u.operand;
2245 int target = currentInstruction[2].u.operand;
2246 DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
2247 addJump(jump(), target);
2248 m_jsrSites.append(JSRInfo(storeLocation, label()));
2249 killLastResultRegister();
2250 RECORD_JUMP_TARGET(target);
2251 }
2252
2253 void JIT::emit_op_sret(Instruction* currentInstruction)
2254 {
2255 jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
2256 killLastResultRegister();
2257 }
2258
2259 void JIT::emit_op_eq(Instruction* currentInstruction)
2260 {
2261 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
2262 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
2263 set32(Equal, regT1, regT0, regT0);
2264 emitTagAsBoolImmediate(regT0);
2265 emitPutVirtualRegister(currentInstruction[1].u.operand);
2266 }
2267
2268 void JIT::emit_op_bitnot(Instruction* currentInstruction)
2269 {
2270 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
2271 emitJumpSlowCaseIfNotImmediateInteger(regT0);
2272 #if USE(JSVALUE64)
2273 not32(regT0);
2274 emitFastArithIntToImmNoCheck(regT0, regT0);
2275 #else
2276 xorPtr(Imm32(~JSImmediate::TagTypeNumber), regT0);
2277 #endif
2278 emitPutVirtualRegister(currentInstruction[1].u.operand);
2279 }
2280
2281 void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
2282 {
2283 JITStubCall stubCall(this, cti_op_resolve_with_base);
2284 stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
2285 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
2286 stubCall.call(currentInstruction[2].u.operand);
2287 }
2288
2289 void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
2290 {
2291 JITStubCall stubCall(this, cti_op_new_func_exp);
2292 stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
2293 stubCall.call(currentInstruction[1].u.operand);
2294 }
2295
2296 void JIT::emit_op_jtrue(Instruction* currentInstruction)
2297 {
2298 unsigned target = currentInstruction[2].u.operand;
2299 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
2300
2301 Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0))));
2302 addJump(emitJumpIfImmediateInteger(regT0), target);
2303
2304 addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target);
2305 addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));
2306
2307 isZero.link(this);
2308 RECORD_JUMP_TARGET(target);
2309 }
2310
2311 void JIT::emit_op_neq(Instruction* currentInstruction)
2312 {
2313 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
2314 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
2315 set32(NotEqual, regT1, regT0, regT0);
2316 emitTagAsBoolImmediate(regT0);
2317
2318 emitPutVirtualRegister(currentInstruction[1].u.operand);
2319
2320 }
2321
2322 void JIT::emit_op_bitxor(Instruction* currentInstruction)
2323 {
2324 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
2325 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
2326 xorPtr(regT1, regT0);
2327 emitFastArithReTagImmediate(regT0, regT0);
2328 emitPutVirtualRegister(currentInstruction[1].u.operand);
2329 }
2330
2331 void JIT::emit_op_new_regexp(Instruction* currentInstruction)
2332 {
2333 JITStubCall stubCall(this, cti_op_new_regexp);
2334 stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
2335 stubCall.call(currentInstruction[1].u.operand);
2336 }
2337
2338 void JIT::emit_op_bitor(Instruction* currentInstruction)
2339 {
2340 emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
2341 emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
2342 orPtr(regT1, regT0);
2343 emitPutVirtualRegister(currentInstruction[1].u.operand);
2344 }
2345
2346 void JIT::emit_op_throw(Instruction* currentInstruction)
2347 {
2348 JITStubCall stubCall(this, cti_op_throw);
2349 stubCall.addArgument(currentInstruction[1].u.operand, regT2);
2350 stubCall.call();
2351 ASSERT(regT0 == returnValueRegister);
2352 #ifndef NDEBUG
2353 // cti_op_throw always changes its return address,
2354 // so this point in the code should never be reached.
2355 breakpoint();
2356 #endif
2357 }
2358
2359 void JIT::emit_op_get_pnames(Instruction* currentInstruction)
2360 {
2361 int dst = currentInstruction[1].u.operand;
2362 int base = currentInstruction[2].u.operand;
2363 int i = currentInstruction[3].u.operand;
2364 int size = currentInstruction[4].u.operand;
2365 int breakTarget = currentInstruction[5].u.operand;
2366
2367 JumpList isNotObject;
2368
2369 emitGetVirtualRegister(base, regT0);
2370 if (!m_codeBlock->isKnownNotImmediate(base))
2371 isNotObject.append(emitJumpIfNotJSCell(regT0));
2372 if (base != m_codeBlock->thisRegister()) {
2373 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2374 isNotObject.append(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
2375 }
2376
2377 // We could inline the case where you have a valid cache, but
2378 // this call doesn't seem to be hot.
2379 Label isObject(this);
2380 JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
2381 getPnamesStubCall.addArgument(regT0);
2382 getPnamesStubCall.call(dst);
2383 load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
2384 store32(Imm32(0), addressFor(i));
2385 store32(regT3, addressFor(size));
2386 Jump end = jump();
2387
2388 isNotObject.link(this);
2389 move(regT0, regT1);
2390 and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT1);
2391 addJump(branch32(Equal, regT1, Imm32(JSImmediate::FullTagTypeNull)), breakTarget);
2392
2393 JITStubCall toObjectStubCall(this, cti_to_object);
2394 toObjectStubCall.addArgument(regT0);
2395 toObjectStubCall.call(base);
2396 jump().linkTo(isObject, this);
2397
2398 end.link(this);
2399 }
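
// Control flow of op_get_pnames, restated as a sketch:
//
//     if (base is null or undefined)
//         goto breakTarget;              // nothing to enumerate
//     if (base is not an object)
//         base = cti_to_object(base);    // then retry as an object
//     dst = cti_op_get_pnames(base);     // build the name iterator
//     i = 0; size = dst->m_jsStringsSize;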
2400
2401 void JIT::emit_op_next_pname(Instruction* currentInstruction)
2402 {
2403 int dst = currentInstruction[1].u.operand;
2404 int base = currentInstruction[2].u.operand;
2405 int i = currentInstruction[3].u.operand;
2406 int size = currentInstruction[4].u.operand;
2407 int it = currentInstruction[5].u.operand;
2408 int target = currentInstruction[6].u.operand;
2409
2410 JumpList callHasProperty;
2411
2412 Label begin(this);
2413 load32(addressFor(i), regT0);
2414 Jump end = branch32(Equal, regT0, addressFor(size));
2415
2416 // Grab key @ i
2417 loadPtr(addressFor(it), regT1);
2418 loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
2419
2420 #if USE(JSVALUE64)
2421 loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);
2422 #else
2423 loadPtr(BaseIndex(regT2, regT0, TimesFour), regT2);
2424 #endif
2425
2426 emitPutVirtualRegister(dst, regT2);
2427
2428 // Increment i
2429 add32(Imm32(1), regT0);
2430 store32(regT0, addressFor(i));
2431
2432 // Verify that i is valid:
2433 emitGetVirtualRegister(base, regT0);
2434
2435 // Test base's structure
2436 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2437 callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));
2438
2439 // Test base's prototype chain
2440 loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
2441 loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
2442 addJump(branchTestPtr(Zero, Address(regT3)), target);
2443
2444 Label checkPrototype(this);
2445 loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
2446 callHasProperty.append(emitJumpIfNotJSCell(regT2));
2447 loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2448 callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
2449 addPtr(Imm32(sizeof(Structure*)), regT3);
2450 branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
2451
2452 // Continue loop.
2453 addJump(jump(), target);
2454
2455 // Slow case: Ask the object if i is valid.
2456 callHasProperty.link(this);
2457 emitGetVirtualRegister(dst, regT1);
2458 JITStubCall stubCall(this, cti_has_property);
2459 stubCall.addArgument(regT0);
2460 stubCall.addArgument(regT1);
2461 stubCall.call();
2462
2463 // Test for valid key.
2464 addJump(branchTest32(NonZero, regT0), target);
2465 jump().linkTo(begin, this);
2466
2467 // End of loop.
2468 end.link(this);
2469 }
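
// op_next_pname above, restated as a sketch (chainStillMatches stands in
// for the inline structure-chain walk and is not a real helper):
//
//     while (i != size) {
//         dst = it->m_jsStrings[i++];
//         if (base->structure == it->m_cachedStructure && chainStillMatches())
//             goto target;                  // cached key is still valid
//         if (cti_has_property(base, dst))  // slow revalidation
//             goto target;
//         // key was shadowed or deleted; fall through to the next one
//     }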
2470
2471 void JIT::emit_op_push_scope(Instruction* currentInstruction)
2472 {
2473 JITStubCall stubCall(this, cti_op_push_scope);
2474 stubCall.addArgument(currentInstruction[1].u.operand, regT2);
2475 stubCall.call(currentInstruction[1].u.operand);
2476 }
2477
2478 void JIT::emit_op_pop_scope(Instruction*)
2479 {
2480 JITStubCall(this, cti_op_pop_scope).call();
2481 }
2482
2483 void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
2484 {
2485 unsigned dst = currentInstruction[1].u.operand;
2486 unsigned src1 = currentInstruction[2].u.operand;
2487 unsigned src2 = currentInstruction[3].u.operand;
2488
2489 emitGetVirtualRegisters(src1, regT0, src2, regT1);
2490
2491 // Jump to a slow case if either operand is a number, or if both are JSCell*s.
2492 move(regT0, regT2);
2493 orPtr(regT1, regT2);
2494 addSlowCase(emitJumpIfJSCell(regT2));
2495 addSlowCase(emitJumpIfImmediateNumber(regT2));
2496
2497 if (type == OpStrictEq)
2498 set32(Equal, regT1, regT0, regT0);
2499 else
2500 set32(NotEqual, regT1, regT0, regT0);
2501 emitTagAsBoolImmediate(regT0);
2502
2503 emitPutVirtualRegister(dst);
2504 }
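
// Rationale for the slow-case filter above (a sketch): once numbers and
// cell/cell pairs are excluded, two immediates are strictly equal exactly
// when they are bitwise equal, and a cell can never equal a non-number
// immediate, so a raw compare suffices. Numbers can have multiple
// representations and two distinct string cells can be equal by value,
// which is why those cases take the slow path.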
2505
2506 void JIT::emit_op_stricteq(Instruction* currentInstruction)
2507 {
2508 compileOpStrictEq(currentInstruction, OpStrictEq);
2509 }
2510
2511 void JIT::emit_op_nstricteq(Instruction* currentInstruction)
2512 {
2513 compileOpStrictEq(currentInstruction, OpNStrictEq);
2514 }
2515
2516 void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
2517 {
2518 int srcVReg = currentInstruction[2].u.operand;
2519 emitGetVirtualRegister(srcVReg, regT0);
2520
2521 Jump wasImmediate = emitJumpIfImmediateInteger(regT0);
2522
2523 emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
2524 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2525 addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(NumberType)));
2526
2527 wasImmediate.link(this);
2528
2529 emitPutVirtualRegister(currentInstruction[1].u.operand);
2530 }
2531
2532 void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
2533 {
2534 JITStubCall stubCall(this, cti_op_push_new_scope);
2535 stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
2536 stubCall.addArgument(currentInstruction[3].u.operand, regT2);
2537 stubCall.call(currentInstruction[1].u.operand);
2538 }
2539
2540 void JIT::emit_op_catch(Instruction* currentInstruction)
2541 {
2542 killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
2543 peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
2544 emitPutVirtualRegister(currentInstruction[1].u.operand);
2545 }
2546
2547 void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
2548 {
2549 JITStubCall stubCall(this, cti_op_jmp_scopes);
2550 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
2551 stubCall.call();
2552 addJump(jump(), currentInstruction[2].u.operand);
2553 RECORD_JUMP_TARGET(currentInstruction[2].u.operand);
2554 }
2555
2556 void JIT::emit_op_switch_imm(Instruction* currentInstruction)
2557 {
2558 unsigned tableIndex = currentInstruction[1].u.operand;
2559 unsigned defaultOffset = currentInstruction[2].u.operand;
2560 unsigned scrutinee = currentInstruction[3].u.operand;
2561
2562 // Create a jump table for the switch destinations, and track this switch statement.
2563 SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
2564 m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
2565 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
2566
2567 JITStubCall stubCall(this, cti_op_switch_imm);
2568 stubCall.addArgument(scrutinee, regT2);
2569 stubCall.addArgument(Imm32(tableIndex));
2570 stubCall.call();
2571 jump(regT0);
2572 }
2573
2574 void JIT::emit_op_switch_char(Instruction* currentInstruction)
2575 {
2576 unsigned tableIndex = currentInstruction[1].u.operand;
2577 unsigned defaultOffset = currentInstruction[2].u.operand;
2578 unsigned scrutinee = currentInstruction[3].u.operand;
2579
2580 // Create a jump table for the switch destinations, and track this switch statement.
2581 SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
2582 m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
2583 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
2584
2585 JITStubCall stubCall(this, cti_op_switch_char);
2586 stubCall.addArgument(scrutinee, regT2);
2587 stubCall.addArgument(Imm32(tableIndex));
2588 stubCall.call();
2589 jump(regT0);
2590 }
2591
2592 void JIT::emit_op_switch_string(Instruction* currentInstruction)
2593 {
2594 unsigned tableIndex = currentInstruction[1].u.operand;
2595 unsigned defaultOffset = currentInstruction[2].u.operand;
2596 unsigned scrutinee = currentInstruction[3].u.operand;
2597
2598 // Create a jump table for the switch destinations, and track this switch statement.
2599 StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
2600 m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
2601
2602 JITStubCall stubCall(this, cti_op_switch_string);
2603 stubCall.addArgument(scrutinee, regT2);
2604 stubCall.addArgument(Imm32(tableIndex));
2605 stubCall.call();
2606 jump(regT0);
2607 }
2608
2609 void JIT::emit_op_new_error(Instruction* currentInstruction)
2610 {
2611 JITStubCall stubCall(this, cti_op_new_error);
2612 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
2613 stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[3].u.operand))));
2614 stubCall.addArgument(Imm32(m_bytecodeIndex));
2615 stubCall.call(currentInstruction[1].u.operand);
2616 }
2617
2618 void JIT::emit_op_debug(Instruction* currentInstruction)
2619 {
2620 JITStubCall stubCall(this, cti_op_debug);
2621 stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
2622 stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
2623 stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
2624 stubCall.call();
2625 }
2626
2627 void JIT::emit_op_eq_null(Instruction* currentInstruction)
2628 {
2629 unsigned dst = currentInstruction[1].u.operand;
2630 unsigned src1 = currentInstruction[2].u.operand;
2631
2632 emitGetVirtualRegister(src1, regT0);
2633 Jump isImmediate = emitJumpIfNotJSCell(regT0);
2634
2635 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2636 setTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);
2637
2638 Jump wasNotImmediate = jump();
2639
2640 isImmediate.link(this);
2641
2642 andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
2643 setPtr(Equal, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);
2644
2645 wasNotImmediate.link(this);
2646
2647 emitTagAsBoolImmediate(regT0);
2648 emitPutVirtualRegister(dst);
2649
2650 }
2651
2652 void JIT::emit_op_neq_null(Instruction* currentInstruction)
2653 {
2654 unsigned dst = currentInstruction[1].u.operand;
2655 unsigned src1 = currentInstruction[2].u.operand;
2656
2657 emitGetVirtualRegister(src1, regT0);
2658 Jump isImmediate = emitJumpIfNotJSCell(regT0);
2659
2660 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
2661 setTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);
2662
2663 Jump wasNotImmediate = jump();
2664
2665 isImmediate.link(this);
2666
2667 andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
2668 setPtr(NotEqual, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);
2669
2670 wasNotImmediate.link(this);
2671
2672 emitTagAsBoolImmediate(regT0);
2673 emitPutVirtualRegister(dst);
2674
2675 }
2676
2677 void JIT::emit_op_enter(Instruction*)
2678 {
2679 // Even though CTI doesn't use them, we initialize our constant
2680 // registers to zap stale pointers, to avoid unnecessarily prolonging
2681 // object lifetime and increasing GC pressure.
2682 size_t count = m_codeBlock->m_numVars;
2683 for (size_t j = 0; j < count; ++j)
2684 emitInitRegister(j);
2685
2686 }
2687
2688 void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
2689 {
2690 // Even though CTI doesn't use them, we initialize our constant
2691 // registers to zap stale pointers, to avoid unnecessarily prolonging
2692 // object lifetime and increasing GC pressure.
2693 size_t count = m_codeBlock->m_numVars;
2694 for (size_t j = 0; j < count; ++j)
2695 emitInitRegister(j);
2696
2697 JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
2698 }
2699
2700 void JIT::emit_op_create_arguments(Instruction*)
2701 {
2702 Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
2703 if (m_codeBlock->m_numParameters == 1)
2704 JITStubCall(this, cti_op_create_arguments_no_params).call();
2705 else
2706 JITStubCall(this, cti_op_create_arguments).call();
2707 argsCreated.link(this);
2708 }
2709
2710 void JIT::emit_op_init_arguments(Instruction*)
2711 {
2712 storePtr(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
2713 }
2714
2715 void JIT::emit_op_convert_this(Instruction* currentInstruction)
2716 {
2717 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
2718
2719 emitJumpSlowCaseIfNotJSCell(regT0);
2720 loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
2721 addSlowCase(branchTest32(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
2722
2723 }
2724
2725 void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
2726 {
2727 peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
2728 Jump noProfiler = branchTestPtr(Zero, Address(regT1));
2729
2730 JITStubCall stubCall(this, cti_op_profile_will_call);
2731 stubCall.addArgument(currentInstruction[1].u.operand, regT1);
2732 stubCall.call();
2733 noProfiler.link(this);
2734
2735 }
2736
2737 void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
2738 {
2739 peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
2740 Jump noProfiler = branchTestPtr(Zero, Address(regT1));
2741
2742 JITStubCall stubCall(this, cti_op_profile_did_call);
2743 stubCall.addArgument(currentInstruction[1].u.operand, regT1);
2744 stubCall.call();
2745 noProfiler.link(this);
2746 }
2747
2748
2749 // Slow cases
2750
2751 void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
2752 {
2753 linkSlowCase(iter);
2754 linkSlowCase(iter);
2755 JITStubCall stubCall(this, cti_op_convert_this);
2756 stubCall.addArgument(regT0);
2757 stubCall.call(currentInstruction[1].u.operand);
2758 }
2759
2760 void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
2761 {
2762 linkSlowCase(iter);
2763 linkSlowCase(iter);
2764 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
2765 emitPutVirtualRegister(currentInstruction[1].u.operand);
2766 }
2767
2768 void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
2769 {
2770 linkSlowCase(iter);
2771
2772 JITStubCall stubCall(this, cti_op_to_primitive);
2773 stubCall.addArgument(regT0);
2774 stubCall.call(currentInstruction[1].u.operand);
2775 }
2776
2777 void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
2778 {
2779 unsigned dst = currentInstruction[1].u.operand;
2780 unsigned base = currentInstruction[2].u.operand;
2781 unsigned property = currentInstruction[3].u.operand;
2782
2783 linkSlowCase(iter); // property int32 check
2784 linkSlowCaseIfNotJSCell(iter, base); // base cell check
2785 linkSlowCase(iter); // base array check
2786 linkSlowCase(iter); // vector length check
2787 linkSlowCase(iter); // empty value
2788
2789 JITStubCall stubCall(this, cti_op_get_by_val);
2790 stubCall.addArgument(base, regT2);
2791 stubCall.addArgument(property, regT2);
2792 stubCall.call(dst);
2793 }
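
// Note on the linkSlowCase calls above (and in the other emitSlow_*
// functions): iter walks the SlowCaseEntry list in the order the fast path
// emitted its addSlowCase checks, so the links must be issued in exactly
// that order - the comments record which fast-path check each corresponds
// to.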
2794
2795 void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
2796 {
2797 unsigned op2 = currentInstruction[2].u.operand;
2798 unsigned target = currentInstruction[3].u.operand;
2799 if (isOperandConstantImmediateInt(op2)) {
2800 linkSlowCase(iter);
2801 JITStubCall stubCall(this, cti_op_loop_if_lesseq);
2802 stubCall.addArgument(regT0);
2803 stubCall.addArgument(currentInstruction[2].u.operand, regT2);
2804 stubCall.call();
2805 emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
2806 } else {
2807 linkSlowCase(iter);
2808 linkSlowCase(iter);
2809 JITStubCall stubCall(this, cti_op_loop_if_lesseq);
2810 stubCall.addArgument(regT0);
2811 stubCall.addArgument(regT1);
2812 stubCall.call();
2813 emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
2814 }
2815 }
2816
2817 void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
2818 {
2819 unsigned base = currentInstruction[1].u.operand;
2820 unsigned property = currentInstruction[2].u.operand;
2821 unsigned value = currentInstruction[3].u.operand;
2822
2823 linkSlowCase(iter); // property int32 check
2824 linkSlowCaseIfNotJSCell(iter, base); // base cell check
2825 linkSlowCase(iter); // base not array check
2826 linkSlowCase(iter); // in vector check
2827
2828 JITStubCall stubPutByValCall(this, cti_op_put_by_val);
2829 stubPutByValCall.addArgument(regT0);
2830 stubPutByValCall.addArgument(property, regT2);
2831 stubPutByValCall.addArgument(value, regT2);
2832 stubPutByValCall.call();
2833 }
2834
2835 void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
2836 {
2837 linkSlowCase(iter);
2838 xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
2839 JITStubCall stubCall(this, cti_op_not);
2840 stubCall.addArgument(regT0);
2841 stubCall.call(currentInstruction[1].u.operand);
2842 }
2843
2844 void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
2845 {
2846 linkSlowCase(iter);
2847 JITStubCall stubCall(this, cti_op_jtrue);
2848 stubCall.addArgument(regT0);
2849 stubCall.call();
2850 emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
2851 }
2852
2853 void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
2854 {
2855 linkSlowCase(iter);
2856 JITStubCall stubCall(this, cti_op_bitnot);
2857 stubCall.addArgument(regT0);
2858 stubCall.call(currentInstruction[1].u.operand);
2859 }
2860
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

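// bitxor and bitor pass regT0/regT1 to the stub directly, with no re-fetch
// from the virtual registers: the fast path loads both operands before any
// of its guards can fail, so the register contents are intact at every
// slow-case entry point.
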
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(Imm32(0x1), regT0); // invert the stub's result: neq is !eq
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

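// Both equality slow paths share cti_op_eq. The stub leaves an untagged 0/1
// in regT0, which is why neither uses stubCall.call(dst): op_neq first flips
// the low bit, then both paths retag the raw bit as a boolean immediate
// before writing it back to the dst virtual register.
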
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

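// Two link calls per opcode, matching the two guards the shared fast path
// (compileOpStrictEq) registers. Unlike the eq/neq slow paths above, these
// stubs return a fully tagged boolean, so stubCall.call(dst) can store the
// result directly without retagging.
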
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter); // baseVal does not implement default HasInstance
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

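// Four slow cases: one per operand that might not be a cell (value, baseVal,
// proto), plus the fast path's structure-flags check that baseVal exhibits
// default HasInstance behaviour. The stub then carries out the complete
// instanceof semantics, including the error cases a fast path cannot handle.
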
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

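// op_call, op_call_eval and op_construct funnel into one shared slow-case
// helper, distinguished by opcode and by a per-call-site index into the call
// link info table; m_callLinkInfoIndex must advance here exactly as it does
// during the fast-path pass so the two stay in step. op_call_varargs takes a
// separate helper and no link index, as a varargs site is not linked to a
// particular callee.
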
void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand); // non-integer immediate
    linkSlowCase(iter); // cell, but not a number cell

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)