]> git.saurik.com Git - apple/javascriptcore.git/blame - jit/JIT.h
JavaScriptCore-7600.1.4.16.1.tar.gz
[apple/javascriptcore.git] / jit / JIT.h
CommitLineData
9dae56ea 1/*
81345200 2 * Copyright (C) 2008, 2012, 2013, 2014 Apple Inc. All rights reserved.
9dae56ea
A
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#ifndef JIT_h
27#define JIT_h
28
9dae56ea
A
29#if ENABLE(JIT)
30
ba379fdc
A
31// We've run into some problems where changing the size of the class JIT leads to
// performance fluctuations. Try forcing alignment in an attempt to stabilize this.
33#if COMPILER(GCC)
34#define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
35#else
36#define JIT_CLASS_ALIGNMENT
37#endif
9dae56ea 38
14957cd0 39#define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
f9bf01c6 40
ba379fdc 41#include "CodeBlock.h"
6fe7ccc8 42#include "CompactJITCodeMap.h"
9dae56ea 43#include "Interpreter.h"
93a37866 44#include "JITDisassembler.h"
81345200 45#include "JITInlineCacheGenerator.h"
4e4e5a6f 46#include "JSInterfaceJIT.h"
93a37866 47#include "LegacyProfiler.h"
9dae56ea 48#include "Opcode.h"
93a37866 49#include "ResultType.h"
81345200 50#include "SamplingTool.h"
93a37866 51#include "UnusedPointer.h"
9dae56ea 52
9dae56ea
A
53namespace JSC {
54
81345200 55 class ArrayAllocationProfile;
9dae56ea 56 class CodeBlock;
6fe7ccc8 57 class FunctionExecutable;
ba379fdc 58 class JIT;
9dae56ea 59 class JSPropertyNameIterator;
81345200 60 class Identifier;
9dae56ea 61 class Interpreter;
93a37866
A
62 class JSScope;
63 class JSStack;
64 class MarkedAllocator;
9dae56ea 65 class Register;
9dae56ea
A
66 class StructureChain;
67
68 struct CallLinkInfo;
69 struct Instruction;
70 struct OperandTypes;
71 struct PolymorphicAccessStructureList;
f9bf01c6
A
72 struct SimpleJumpTable;
73 struct StringJumpTable;
9dae56ea
A
74 struct StructureStubInfo;
75
9dae56ea 76 struct CallRecord {
ba379fdc 77 MacroAssembler::Call from;
14957cd0 78 unsigned bytecodeOffset;
9dae56ea
A
79 void* to;
80
81 CallRecord()
82 {
83 }
84
14957cd0 85 CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
9dae56ea 86 : from(from)
14957cd0 87 , bytecodeOffset(bytecodeOffset)
9dae56ea
A
88 , to(to)
89 {
90 }
91 };
92
93 struct JumpTable {
94 MacroAssembler::Jump from;
14957cd0 95 unsigned toBytecodeOffset;
9dae56ea
A
96
97 JumpTable(MacroAssembler::Jump f, unsigned t)
98 : from(f)
14957cd0 99 , toBytecodeOffset(t)
9dae56ea
A
100 {
101 }
102 };
103
104 struct SlowCaseEntry {
105 MacroAssembler::Jump from;
106 unsigned to;
107 unsigned hint;
108
109 SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
110 : from(f)
111 , to(t)
112 , hint(h)
113 {
114 }
115 };
116
117 struct SwitchRecord {
118 enum Type {
119 Immediate,
120 Character,
121 String
122 };
123
124 Type type;
125
126 union {
127 SimpleJumpTable* simpleJumpTable;
128 StringJumpTable* stringJumpTable;
129 } jumpTable;
130
14957cd0 131 unsigned bytecodeOffset;
9dae56ea
A
132 unsigned defaultOffset;
133
14957cd0 134 SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
9dae56ea 135 : type(type)
14957cd0 136 , bytecodeOffset(bytecodeOffset)
9dae56ea
A
137 , defaultOffset(defaultOffset)
138 {
139 this->jumpTable.simpleJumpTable = jumpTable;
140 }
141
14957cd0 142 SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
9dae56ea 143 : type(String)
14957cd0 144 , bytecodeOffset(bytecodeOffset)
9dae56ea
A
145 , defaultOffset(defaultOffset)
146 {
147 this->jumpTable.stringJumpTable = jumpTable;
148 }
149 };
150
93a37866
A
151 struct ByValCompilationInfo {
152 ByValCompilationInfo() { }
153
154 ByValCompilationInfo(unsigned bytecodeIndex, MacroAssembler::PatchableJump badTypeJump, JITArrayMode arrayMode, MacroAssembler::Label doneTarget)
155 : bytecodeIndex(bytecodeIndex)
156 , badTypeJump(badTypeJump)
157 , arrayMode(arrayMode)
158 , doneTarget(doneTarget)
6fe7ccc8 159 {
6fe7ccc8 160 }
93a37866
A
161
162 unsigned bytecodeIndex;
163 MacroAssembler::PatchableJump badTypeJump;
164 JITArrayMode arrayMode;
165 MacroAssembler::Label doneTarget;
166 MacroAssembler::Label slowPathTarget;
167 MacroAssembler::Call returnAddress;
9dae56ea
A
168 };
169
    // Labels and call sites gathered while emitting a call's fast path;
    // consumed by the link pass to populate the corresponding CallLinkInfo.
    struct CallCompilationInfo {
        MacroAssembler::DataLabelPtr hotPathBegin;
        MacroAssembler::Call hotPathOther;
        MacroAssembler::Call callReturnLocation;
        CallLinkInfo* callLinkInfo; // Non-owning; owned elsewhere — TODO confirm owner.
    };
176
ba379fdc
A
177 // Near calls can only be patched to other JIT code, regular calls can be patched to JIT code or relinked to stub functions.
178 void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
179 void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
180 void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
9dae56ea 181
4e4e5a6f 182 class JIT : private JSInterfaceJIT {
81345200 183 friend class JITSlowPathCall;
ba379fdc
A
184 friend class JITStubCall;
185
9dae56ea
A
186 using MacroAssembler::Jump;
187 using MacroAssembler::JumpList;
188 using MacroAssembler::Label;
189
93a37866 190 static const uintptr_t patchGetByIdDefaultStructure = unusedPointer;
14957cd0 191 static const int patchGetByIdDefaultOffset = 0;
9dae56ea
A
192 // Magic number - initial offset cannot be representable as a signed 8bit value, or the X86Assembler
193 // will compress the displacement, and we may not be able to fit a patched offset.
14957cd0 194 static const int patchPutByIdDefaultOffset = 256;
9dae56ea 195
9dae56ea 196 public:
81345200 197 static CompilationResult compile(VM* vm, CodeBlock* codeBlock, JITCompilationEffort effort)
93a37866 198 {
81345200 199 return JIT(vm, codeBlock).privateCompile(effort);
93a37866
A
200 }
201
202 static void compileClosureCall(VM* vm, CallLinkInfo* callLinkInfo, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
9dae56ea 203 {
93a37866
A
204 JIT jit(vm, callerCodeBlock);
205 jit.m_bytecodeOffset = callLinkInfo->codeOrigin.bytecodeIndex;
206 jit.privateCompileClosureCall(callLinkInfo, calleeCodeBlock, expectedStructure, expectedExecutable, codePtr);
9dae56ea
A
207 }
208
93a37866
A
209 static void compileGetByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
210 {
211 JIT jit(vm, codeBlock);
212 jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
213 jit.privateCompileGetByVal(byValInfo, returnAddress, arrayMode);
214 }
9dae56ea 215
93a37866 216 static void compilePutByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
9dae56ea 217 {
93a37866
A
218 JIT jit(vm, codeBlock);
219 jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
220 jit.privateCompilePutByVal(byValInfo, returnAddress, arrayMode);
9dae56ea 221 }
81345200
A
222
223 static void compileDirectPutByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
224 {
225 JIT jit(vm, codeBlock);
226 jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
227 jit.privateCompilePutByVal(byValInfo, returnAddress, arrayMode);
228 }
9dae56ea 229
93a37866 230 static CodeRef compileCTINativeCall(VM* vm, NativeFunction func)
14957cd0 231 {
93a37866 232 if (!vm->canUseJIT()) {
6fe7ccc8 233 return CodeRef::createLLIntCodeRef(llint_native_call_trampoline);
6fe7ccc8 234 }
93a37866
A
235 JIT jit(vm, 0);
236 return jit.privateCompileCTINativeCall(vm, func);
14957cd0
A
237 }
238
81345200
A
239 static unsigned frameRegisterCountFor(CodeBlock*);
240 static int stackPointerOffsetFor(CodeBlock*);
9dae56ea 241
9dae56ea 242 private:
93a37866 243 JIT(VM*, CodeBlock* = 0);
9dae56ea
A
244
245 void privateCompileMainPass();
246 void privateCompileLinkPass();
247 void privateCompileSlowCases();
81345200 248 CompilationResult privateCompile(JITCompilationEffort);
93a37866
A
249
250 void privateCompileClosureCall(CallLinkInfo*, CodeBlock* calleeCodeBlock, Structure*, ExecutableBase*, MacroAssemblerCodePtr);
251
93a37866
A
252 void privateCompileGetByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);
253 void privateCompilePutByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);
254
255 Label privateCompileCTINativeCall(VM*, bool isConstruct = false);
256 CodeRef privateCompileCTINativeCall(VM*, NativeFunction);
ba379fdc 257 void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
9dae56ea 258
81345200
A
259 // Add a call out from JIT code, without an exception check.
260 Call appendCall(const FunctionPtr& function)
261 {
262 Call functionCall = call();
263 m_calls.append(CallRecord(functionCall, m_bytecodeOffset, function.value()));
264 return functionCall;
265 }
266
267#if OS(WINDOWS) && CPU(X86_64)
268 Call appendCallWithSlowPathReturnType(const FunctionPtr& function)
269 {
270 Call functionCall = callWithSlowPathReturnType();
271 m_calls.append(CallRecord(functionCall, m_bytecodeOffset, function.value()));
272 return functionCall;
273 }
274#endif
275
276 void exceptionCheck(Jump jumpToHandler)
277 {
278 m_exceptionChecks.append(jumpToHandler);
279 }
280
281 void exceptionCheck()
282 {
283 m_exceptionChecks.append(emitExceptionCheck());
284 }
285
286 void exceptionCheckWithCallFrameRollback()
287 {
288 m_exceptionChecksWithCallFrameRollback.append(emitExceptionCheck());
289 }
290
291 void privateCompileExceptionHandlers();
6fe7ccc8 292
9dae56ea 293 void addSlowCase(Jump);
ba379fdc 294 void addSlowCase(JumpList);
6fe7ccc8 295 void addSlowCase();
9dae56ea
A
296 void addJump(Jump, int);
297 void emitJumpSlowToHot(Jump, int);
298
6fe7ccc8
A
299 void compileOpCall(OpcodeID, Instruction*, unsigned callLinkInfoIndex);
300 void compileOpCallSlowCase(OpcodeID, Instruction*, Vector<SlowCaseEntry>::iterator&, unsigned callLinkInfoIndex);
301 void compileLoadVarargs(Instruction*);
81345200
A
302 void compileCallEval(Instruction*);
303 void compileCallEvalSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&);
304 void emitPutCallResult(Instruction*);
ba379fdc 305
9dae56ea
A
306 enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
307 void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
81345200 308 bool isOperandConstantImmediateDouble(int src);
f9bf01c6 309
6fe7ccc8
A
310 void emitLoadDouble(int index, FPRegisterID value);
311 void emitLoadInt32ToDouble(int index, FPRegisterID value);
81345200 312 Jump emitJumpIfCellNotObject(RegisterID cellReg);
9dae56ea 313
81345200 314 enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterBase, ShouldFilterValue, ShouldFilterBaseAndValue };
6fe7ccc8
A
315 // value register in write barrier is used before any scratch registers
316 // so may safely be the same as either of the scratch registers.
81345200
A
317 void emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode);
318 void emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode);
319 void emitWriteBarrier(JSCell* owner);
6fe7ccc8 320
93a37866
A
321 template<typename StructureType> // StructureType can be RegisterID or ImmPtr.
322 void emitAllocateJSObject(RegisterID allocator, StructureType, RegisterID result, RegisterID scratch);
6fe7ccc8 323
6fe7ccc8
A
324 // This assumes that the value to profile is in regT0 and that regT3 is available for
325 // scratch.
326 void emitValueProfilingSite(ValueProfile*);
327 void emitValueProfilingSite(unsigned bytecodeOffset);
328 void emitValueProfilingSite();
81345200
A
329 void emitArrayProfilingSiteWithCell(RegisterID cell, RegisterID indexingType, ArrayProfile*);
330 void emitArrayProfilingSiteForBytecodeIndexWithCell(RegisterID cell, RegisterID indexingType, unsigned bytecodeIndex);
93a37866
A
331 void emitArrayProfileStoreToHoleSpecialCase(ArrayProfile*);
332 void emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile*);
333
334 JITArrayMode chooseArrayMode(ArrayProfile*);
335
336 // Property is in regT1, base is in regT0. regT2 contains indexing type.
337 // Property is int-checked and zero extended. Base is cell checked.
338 // Structure is already profiled. Returns the slow cases. Fall-through
339 // case contains result in regT0, and it is not yet profiled.
340 JumpList emitInt32GetByVal(Instruction* instruction, PatchableJump& badType) { return emitContiguousGetByVal(instruction, badType, Int32Shape); }
341 JumpList emitDoubleGetByVal(Instruction*, PatchableJump& badType);
342 JumpList emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape = ContiguousShape);
343 JumpList emitArrayStorageGetByVal(Instruction*, PatchableJump& badType);
81345200
A
344 JumpList emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType);
345 JumpList emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType);
93a37866
A
346
        // Property is in regT1, base is in regT0. regT2 contains the indexing type.
348 // The value to store is not yet loaded. Property is int-checked and
349 // zero-extended. Base is cell checked. Structure is already profiled.
350 // returns the slow cases.
351 JumpList emitInt32PutByVal(Instruction* currentInstruction, PatchableJump& badType)
352 {
353 return emitGenericContiguousPutByVal(currentInstruction, badType, Int32Shape);
354 }
355 JumpList emitDoublePutByVal(Instruction* currentInstruction, PatchableJump& badType)
356 {
357 return emitGenericContiguousPutByVal(currentInstruction, badType, DoubleShape);
358 }
359 JumpList emitContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType)
360 {
361 return emitGenericContiguousPutByVal(currentInstruction, badType);
362 }
363 JumpList emitGenericContiguousPutByVal(Instruction*, PatchableJump& badType, IndexingType indexingShape = ContiguousShape);
364 JumpList emitArrayStoragePutByVal(Instruction*, PatchableJump& badType);
81345200
A
365 JumpList emitIntTypedArrayPutByVal(Instruction*, PatchableJump& badType, TypedArrayType);
366 JumpList emitFloatTypedArrayPutByVal(Instruction*, PatchableJump& badType, TypedArrayType);
93a37866
A
367
368 enum FinalObjectMode { MayBeFinal, KnownNotFinal };
6fe7ccc8 369
81345200
A
370 template <typename T> Jump branchStructure(RelationalCondition, T leftHandSide, Structure*);
371
ba379fdc 372#if USE(JSVALUE32_64)
81345200 373 bool getOperandConstantImmediateInt(int op1, int op2, int& op, int32_t& constant);
ba379fdc 374
6fe7ccc8
A
375 void emitLoadTag(int index, RegisterID tag);
376 void emitLoadPayload(int index, RegisterID payload);
ba379fdc
A
377
378 void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
6fe7ccc8
A
379 void emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
380 void emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2);
381
382 void emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
383 void emitStore(int index, const JSValue constant, RegisterID base = callFrameRegister);
384 void emitStoreInt32(int index, RegisterID payload, bool indexIsInt32 = false);
385 void emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32 = false);
6fe7ccc8
A
386 void emitStoreCell(int index, RegisterID payload, bool indexIsCell = false);
387 void emitStoreBool(int index, RegisterID payload, bool indexIsBool = false);
388 void emitStoreDouble(int index, FPRegisterID value);
ba379fdc 389
6fe7ccc8
A
390 void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex);
391 void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag);
ba379fdc 392
81345200 393 void compileGetByIdHotPath(const Identifier*);
93a37866
A
394 void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset);
395 void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset);
396 void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode = MayBeFinal);
397 void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset);
9dae56ea 398
ba379fdc 399 // Arithmetic opcode helpers
81345200
A
400 void emitAdd32Constant(int dst, int op, int32_t constant, ResultType opType);
401 void emitSub32Constant(int dst, int op, int32_t constant, ResultType opType);
402 void emitBinaryDoubleOp(OpcodeID, int dst, int op1, int op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
9dae56ea 403
ba379fdc
A
404#else // USE(JSVALUE32_64)
405 void emitGetVirtualRegister(int src, RegisterID dst);
81345200 406 void emitGetVirtualRegister(VirtualRegister src, RegisterID dst);
ba379fdc 407 void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
81345200
A
408 void emitGetVirtualRegisters(VirtualRegister src1, RegisterID dst1, VirtualRegister src2, RegisterID dst2);
409 void emitPutVirtualRegister(int dst, RegisterID from = regT0);
410 void emitPutVirtualRegister(VirtualRegister dst, RegisterID from = regT0);
411 void emitStoreCell(int dst, RegisterID payload, bool /* only used in JSValue32_64 */ = false)
412 {
413 emitPutVirtualRegister(dst, payload);
414 }
415 void emitStoreCell(VirtualRegister dst, RegisterID payload)
6fe7ccc8
A
416 {
417 emitPutVirtualRegister(dst, payload);
418 }
9dae56ea 419
81345200 420 int32_t getConstantOperandImmediateInt(int src);
9dae56ea
A
421
422 Jump emitJumpIfJSCell(RegisterID);
423 Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
424 void emitJumpSlowCaseIfJSCell(RegisterID);
9dae56ea
A
425 void emitJumpSlowCaseIfNotJSCell(RegisterID);
426 void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
6fe7ccc8
A
427 Jump emitJumpIfImmediateInteger(RegisterID);
428 Jump emitJumpIfNotImmediateInteger(RegisterID);
429 Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
9dae56ea 430 void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
f9bf01c6 431 void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
9dae56ea
A
432 void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
433
9dae56ea 434 void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
9dae56ea
A
435
436 void emitTagAsBoolImmediate(RegisterID reg);
81345200
A
437 void compileBinaryArithOp(OpcodeID, int dst, int src1, int src2, OperandTypes opi);
438 void compileBinaryArithOpSlowCase(Instruction*, OpcodeID, Vector<SlowCaseEntry>::iterator&, int dst, int src1, int src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
ba379fdc 439
81345200 440 void compileGetByIdHotPath(int baseVReg, const Identifier*);
93a37866
A
441 void compileGetDirectOffset(RegisterID base, RegisterID result, PropertyOffset cachedOffset);
442 void compileGetDirectOffset(JSObject* base, RegisterID result, PropertyOffset cachedOffset);
443 void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch, FinalObjectMode = MayBeFinal);
444 void compilePutDirectOffset(RegisterID base, RegisterID value, PropertyOffset cachedOffset);
f9bf01c6 445
ba379fdc
A
446#endif // USE(JSVALUE32_64)
447
81345200
A
448 void emit_compareAndJump(OpcodeID, int op1, int op2, unsigned target, RelationalCondition);
449 void emit_compareAndJumpSlow(int op1, int op2, unsigned target, DoubleCondition, size_t (JIT_OPERATION *operation)(ExecState*, EncodedJSValue, EncodedJSValue), bool invert, Vector<SlowCaseEntry>::iterator&);
6fe7ccc8 450
81345200 451 void emit_op_touch_entry(Instruction*);
ba379fdc
A
452 void emit_op_add(Instruction*);
453 void emit_op_bitand(Instruction*);
ba379fdc
A
454 void emit_op_bitor(Instruction*);
455 void emit_op_bitxor(Instruction*);
456 void emit_op_call(Instruction*);
457 void emit_op_call_eval(Instruction*);
458 void emit_op_call_varargs(Instruction*);
81345200
A
459 void emit_op_construct_varargs(Instruction*);
460 void emit_op_captured_mov(Instruction*);
ba379fdc
A
461 void emit_op_catch(Instruction*);
462 void emit_op_construct(Instruction*);
14957cd0
A
463 void emit_op_get_callee(Instruction*);
464 void emit_op_create_this(Instruction*);
81345200 465 void emit_op_to_this(Instruction*);
ba379fdc
A
466 void emit_op_create_arguments(Instruction*);
467 void emit_op_debug(Instruction*);
468 void emit_op_del_by_id(Instruction*);
469 void emit_op_div(Instruction*);
470 void emit_op_end(Instruction*);
471 void emit_op_enter(Instruction*);
14957cd0 472 void emit_op_create_activation(Instruction*);
ba379fdc
A
473 void emit_op_eq(Instruction*);
474 void emit_op_eq_null(Instruction*);
475 void emit_op_get_by_id(Instruction*);
14957cd0 476 void emit_op_get_arguments_length(Instruction*);
ba379fdc 477 void emit_op_get_by_val(Instruction*);
14957cd0 478 void emit_op_get_argument_by_val(Instruction*);
f9bf01c6 479 void emit_op_get_by_pname(Instruction*);
14957cd0
A
480 void emit_op_init_lazy_reg(Instruction*);
481 void emit_op_check_has_instance(Instruction*);
ba379fdc 482 void emit_op_instanceof(Instruction*);
6fe7ccc8
A
483 void emit_op_is_undefined(Instruction*);
484 void emit_op_is_boolean(Instruction*);
485 void emit_op_is_number(Instruction*);
486 void emit_op_is_string(Instruction*);
ba379fdc
A
487 void emit_op_jeq_null(Instruction*);
488 void emit_op_jfalse(Instruction*);
489 void emit_op_jmp(Instruction*);
ba379fdc
A
490 void emit_op_jneq_null(Instruction*);
491 void emit_op_jneq_ptr(Instruction*);
f9bf01c6 492 void emit_op_jless(Instruction*);
6fe7ccc8
A
493 void emit_op_jlesseq(Instruction*);
494 void emit_op_jgreater(Instruction*);
495 void emit_op_jgreatereq(Instruction*);
496 void emit_op_jnless(Instruction*);
ba379fdc 497 void emit_op_jnlesseq(Instruction*);
6fe7ccc8
A
498 void emit_op_jngreater(Instruction*);
499 void emit_op_jngreatereq(Instruction*);
ba379fdc 500 void emit_op_jtrue(Instruction*);
6fe7ccc8 501 void emit_op_loop_hint(Instruction*);
ba379fdc 502 void emit_op_lshift(Instruction*);
ba379fdc
A
503 void emit_op_mod(Instruction*);
504 void emit_op_mov(Instruction*);
505 void emit_op_mul(Instruction*);
506 void emit_op_negate(Instruction*);
507 void emit_op_neq(Instruction*);
508 void emit_op_neq_null(Instruction*);
509 void emit_op_new_array(Instruction*);
93a37866 510 void emit_op_new_array_with_size(Instruction*);
14957cd0 511 void emit_op_new_array_buffer(Instruction*);
ba379fdc 512 void emit_op_new_func(Instruction*);
81345200 513 void emit_op_new_captured_func(Instruction*);
ba379fdc
A
514 void emit_op_new_func_exp(Instruction*);
515 void emit_op_new_object(Instruction*);
516 void emit_op_new_regexp(Instruction*);
f9bf01c6 517 void emit_op_get_pnames(Instruction*);
ba379fdc
A
518 void emit_op_next_pname(Instruction*);
519 void emit_op_not(Instruction*);
520 void emit_op_nstricteq(Instruction*);
521 void emit_op_pop_scope(Instruction*);
93a37866
A
522 void emit_op_dec(Instruction*);
523 void emit_op_inc(Instruction*);
ba379fdc
A
524 void emit_op_profile_did_call(Instruction*);
525 void emit_op_profile_will_call(Instruction*);
93a37866
A
526 void emit_op_push_name_scope(Instruction*);
527 void emit_op_push_with_scope(Instruction*);
ba379fdc
A
528 void emit_op_put_by_id(Instruction*);
529 void emit_op_put_by_index(Instruction*);
530 void emit_op_put_by_val(Instruction*);
6fe7ccc8 531 void emit_op_put_getter_setter(Instruction*);
93a37866 532 void emit_op_init_global_const(Instruction*);
ba379fdc 533 void emit_op_ret(Instruction*);
14957cd0 534 void emit_op_ret_object_or_this(Instruction*);
ba379fdc 535 void emit_op_rshift(Instruction*);
ba379fdc
A
536 void emit_op_strcat(Instruction*);
537 void emit_op_stricteq(Instruction*);
538 void emit_op_sub(Instruction*);
539 void emit_op_switch_char(Instruction*);
540 void emit_op_switch_imm(Instruction*);
541 void emit_op_switch_string(Instruction*);
542 void emit_op_tear_off_activation(Instruction*);
543 void emit_op_tear_off_arguments(Instruction*);
544 void emit_op_throw(Instruction*);
93a37866
A
545 void emit_op_throw_static_error(Instruction*);
546 void emit_op_to_number(Instruction*);
ba379fdc
A
547 void emit_op_to_primitive(Instruction*);
548 void emit_op_unexpected_load(Instruction*);
81345200 549 void emit_op_unsigned(Instruction*);
4e4e5a6f 550 void emit_op_urshift(Instruction*);
ba379fdc
A
551
552 void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
553 void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc
A
554 void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
555 void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
556 void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
557 void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
558 void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
81345200
A
559 void emitSlow_op_construct_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
560 void emitSlow_op_captured_mov(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc 561 void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
81345200 562 void emitSlow_op_to_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
6fe7ccc8 563 void emitSlow_op_create_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc
A
564 void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
565 void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
81345200 566 void emitSlow_op_get_callee(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc 567 void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
14957cd0 568 void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc 569 void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
14957cd0 570 void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
f9bf01c6 571 void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
14957cd0 572 void emitSlow_op_check_has_instance(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc
A
573 void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
574 void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
f9bf01c6 575 void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
6fe7ccc8
A
576 void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
577 void emitSlow_op_jgreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
578 void emitSlow_op_jgreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
579 void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc 580 void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
6fe7ccc8
A
581 void emitSlow_op_jngreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
582 void emitSlow_op_jngreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc 583 void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
93a37866 584 void emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc 585 void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc
A
586 void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
587 void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
588 void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
589 void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
6fe7ccc8 590 void emitSlow_op_new_object(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc
A
591 void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
592 void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
93a37866
A
593 void emitSlow_op_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
594 void emitSlow_op_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc
A
595 void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
596 void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc
A
597 void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
598 void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
599 void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
93a37866 600 void emitSlow_op_to_number(Instruction*, Vector<SlowCaseEntry>::iterator&);
ba379fdc 601 void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
81345200 602 void emitSlow_op_unsigned(Instruction*, Vector<SlowCaseEntry>::iterator&);
4e4e5a6f 603 void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
93a37866 604
81345200
A
605 void emit_op_resolve_scope(Instruction*);
606 void emit_op_get_from_scope(Instruction*);
607 void emit_op_put_to_scope(Instruction*);
608 void emitSlow_op_resolve_scope(Instruction*, Vector<SlowCaseEntry>::iterator&);
609 void emitSlow_op_get_from_scope(Instruction*, Vector<SlowCaseEntry>::iterator&);
610 void emitSlow_op_put_to_scope(Instruction*, Vector<SlowCaseEntry>::iterator&);
93a37866 611
4e4e5a6f
A
612 void emitRightShift(Instruction*, bool isUnsigned);
613 void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);
ba379fdc 614
81345200
A
615 void emitVarInjectionCheck(bool needsVarInjectionChecks);
616 void emitResolveClosure(int dst, bool needsVarInjectionChecks, unsigned depth);
617 void emitLoadWithStructureCheck(int scope, Structure** structureSlot);
618 void emitGetGlobalProperty(uintptr_t* operandSlot);
619 void emitGetGlobalVar(uintptr_t operand);
620 void emitGetClosureVar(int scope, uintptr_t operand);
621 void emitPutGlobalProperty(uintptr_t* operandSlot, int value);
622#if USE(JSVALUE64)
623 void emitNotifyWrite(RegisterID value, RegisterID scratch, VariableWatchpointSet*);
624#else
625 void emitNotifyWrite(RegisterID tag, RegisterID payload, RegisterID scratch, VariableWatchpointSet*);
626#endif
627 void emitPutGlobalVar(uintptr_t operand, int value, VariableWatchpointSet*);
628 void emitPutClosureVar(int scope, uintptr_t operand, int value);
629
630 void emitInitRegister(int dst);
        // Accessors for CallFrame header slots; 'from' defaults to the
        // current call frame register.
        void emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry);
        void emitGetFromCallFrameHeaderPtr(JSStack::CallFrameHeaderEntry, RegisterID to, RegisterID from = callFrameRegister);
        void emitGetFromCallFrameHeader32(JSStack::CallFrameHeaderEntry, RegisterID to, RegisterID from = callFrameRegister);
#if USE(JSVALUE64)
        void emitGetFromCallFrameHeader64(JSStack::CallFrameHeaderEntry, RegisterID to, RegisterID from = callFrameRegister);
#endif

        // Queries against the CodeBlock's constant pool for a bytecode
        // operand 'src'.
        JSValue getConstantOperand(int src);
        bool isOperandConstantImmediateInt(int src);
        bool isOperandConstantImmediateChar(int src);
ba379fdc
A
643 Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
644 {
645 return iter++->from;
646 }
647 void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
648 {
649 iter->from.link(this);
650 ++iter;
651 }
6fe7ccc8
A
        // Skips a slow-case entry that was recorded purely as a placeholder;
        // such an entry must not carry a real (set) jump. Note the increment
        // deliberately stays outside the ASSERT, which compiles away in
        // release builds.
        void linkDummySlowCase(Vector<SlowCaseEntry>::iterator& iter)
        {
            ASSERT(!iter->from.isSet());
            ++iter;
        }
        // Links the current slow case only for operands that might not be
        // JSCells (virtualRegisterIndex identifies the operand).
        void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int virtualRegisterIndex);

        // Wrappers that emit a call to a C++ JIT operation followed by the
        // appropriate exception check or call-frame rollback.
        MacroAssembler::Call appendCallWithExceptionCheck(const FunctionPtr&);
#if OS(WINDOWS) && CPU(X86_64)
        // Win64 needs a dedicated variant — presumably because of its
        // aggregate-return ABI for SlowPathReturnType; confirm in JIT.cpp.
        MacroAssembler::Call appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr&);
#endif
        MacroAssembler::Call appendCallWithCallFrameRollbackOnException(const FunctionPtr&);
        // The trailing int names the virtual register receiving the JSValue result.
        MacroAssembler::Call appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr&, int);
        MacroAssembler::Call appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr&, int);

        // Tag type selecting the value-profiling variants of callOperation.
        enum WithProfileTag { WithProfile };
        // callOperation overloads — one per JIT operation signature. The
        // typedef names encode the C signature: leading letter(s) give the
        // return kind, trailing letters the argument kinds (E = ExecState*,
        // J = EncodedJSValue, C = cell, Z = int32_t, St = Structure*, ... —
        // presumed encoding; confirm against JITOperations.h). Overloads
        // whose first parameter is an int store the JSValue result into that
        // virtual register.
        MacroAssembler::Call callOperation(C_JITOperation_E);
        MacroAssembler::Call callOperation(C_JITOperation_EO, GPRReg);
        MacroAssembler::Call callOperation(C_JITOperation_ESt, Structure*);
        MacroAssembler::Call callOperation(C_JITOperation_EZ, int32_t);
        MacroAssembler::Call callOperation(F_JITOperation_EJZZ, GPRReg, int32_t, int32_t);
        MacroAssembler::Call callOperation(J_JITOperation_E, int);
        MacroAssembler::Call callOperation(J_JITOperation_EAapJ, int, ArrayAllocationProfile*, GPRReg);
        MacroAssembler::Call callOperation(J_JITOperation_EAapJcpZ, int, ArrayAllocationProfile*, GPRReg, int32_t);
        MacroAssembler::Call callOperation(J_JITOperation_EAapJcpZ, int, ArrayAllocationProfile*, const JSValue*, int32_t);
        MacroAssembler::Call callOperation(J_JITOperation_EC, int, JSCell*);
        MacroAssembler::Call callOperation(V_JITOperation_EC, JSCell*);
        MacroAssembler::Call callOperation(J_JITOperation_EJ, int, GPRReg);
#if USE(JSVALUE64)
        MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_ESsiJI, int, StructureStubInfo*, GPRReg, StringImpl*);
#else
        MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_ESsiJI, int, StructureStubInfo*, GPRReg, GPRReg, StringImpl*);
#endif
        MacroAssembler::Call callOperation(J_JITOperation_EJIdc, int, GPRReg, const Identifier*);
        MacroAssembler::Call callOperation(J_JITOperation_EJJ, int, GPRReg, GPRReg);
#if USE(JSVALUE64)
        MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_EJJ, int, GPRReg, GPRReg);
#else
        MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_EJJ, int, GPRReg, GPRReg, GPRReg, GPRReg);
#endif
        MacroAssembler::Call callOperation(J_JITOperation_EP, int, void*);
        MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_EPc, int, Instruction*);
        MacroAssembler::Call callOperation(J_JITOperation_EZ, int, int32_t);
        MacroAssembler::Call callOperation(P_JITOperation_EJS, GPRReg, size_t);
        MacroAssembler::Call callOperation(S_JITOperation_ECC, RegisterID, RegisterID);
        MacroAssembler::Call callOperation(S_JITOperation_EJ, RegisterID);
        MacroAssembler::Call callOperation(S_JITOperation_EJJ, RegisterID, RegisterID);
        MacroAssembler::Call callOperation(S_JITOperation_EOJss, RegisterID, RegisterID);
        MacroAssembler::Call callOperation(Sprt_JITOperation_EZ, int32_t);
        MacroAssembler::Call callOperation(V_JITOperation_E);
        MacroAssembler::Call callOperation(V_JITOperation_EC, RegisterID);
        MacroAssembler::Call callOperation(V_JITOperation_ECC, RegisterID, RegisterID);
        MacroAssembler::Call callOperation(V_JITOperation_ECICC, RegisterID, const Identifier*, RegisterID, RegisterID);
        MacroAssembler::Call callOperation(V_JITOperation_EIdJZ, const Identifier*, RegisterID, int32_t);
        MacroAssembler::Call callOperation(V_JITOperation_EJ, RegisterID);
#if USE(JSVALUE64)
        MacroAssembler::Call callOperationNoExceptionCheck(V_JITOperation_EJ, RegisterID);
#else
        MacroAssembler::Call callOperationNoExceptionCheck(V_JITOperation_EJ, RegisterID, RegisterID);
#endif
        MacroAssembler::Call callOperation(V_JITOperation_EJIdJJ, RegisterID, const Identifier*, RegisterID, RegisterID);
#if USE(JSVALUE64)
        MacroAssembler::Call callOperation(F_JITOperation_EFJJZ, RegisterID, RegisterID, RegisterID, int32_t);
        MacroAssembler::Call callOperation(V_JITOperation_ESsiJJI, StructureStubInfo*, RegisterID, RegisterID, StringImpl*);
#else
        MacroAssembler::Call callOperation(V_JITOperation_ESsiJJI, StructureStubInfo*, RegisterID, RegisterID, RegisterID, RegisterID, StringImpl*);
#endif
        MacroAssembler::Call callOperation(V_JITOperation_EJJJ, RegisterID, RegisterID, RegisterID);
        MacroAssembler::Call callOperation(V_JITOperation_EJZJ, RegisterID, int32_t, RegisterID);
        MacroAssembler::Call callOperation(V_JITOperation_EJZ, RegisterID, int32_t);
        MacroAssembler::Call callOperation(V_JITOperation_EPc, Instruction*);
        MacroAssembler::Call callOperation(V_JITOperation_EZ, int32_t);
        MacroAssembler::Call callOperationWithCallFrameRollbackOnException(J_JITOperation_E);
        MacroAssembler::Call callOperationNoExceptionCheck(J_JITOperation_EE, RegisterID);
        MacroAssembler::Call callOperationWithCallFrameRollbackOnException(V_JITOperation_ECb, CodeBlock*);
        MacroAssembler::Call callOperationWithCallFrameRollbackOnException(Z_JITOperation_E);
#if USE(JSVALUE32_64)
        // 32-bit variants: each EncodedJSValue argument occupies a
        // tag/payload register pair, doubling the GPR parameters.
        MacroAssembler::Call callOperation(F_JITOperation_EFJJZ, RegisterID, RegisterID, RegisterID, RegisterID, RegisterID, int32_t);
        MacroAssembler::Call callOperation(F_JITOperation_EJZZ, GPRReg, GPRReg, int32_t, int32_t);
        MacroAssembler::Call callOperation(J_JITOperation_EAapJ, int, ArrayAllocationProfile*, GPRReg, GPRReg);
        MacroAssembler::Call callOperation(J_JITOperation_EJ, int, GPRReg, GPRReg);
        MacroAssembler::Call callOperation(J_JITOperation_EJIdc, int, GPRReg, GPRReg, const Identifier*);
        MacroAssembler::Call callOperation(J_JITOperation_EJJ, int, GPRReg, GPRReg, GPRReg, GPRReg);
        MacroAssembler::Call callOperation(P_JITOperation_EJS, GPRReg, GPRReg, size_t);
        MacroAssembler::Call callOperation(S_JITOperation_EJ, RegisterID, RegisterID);
        MacroAssembler::Call callOperation(S_JITOperation_EJJ, RegisterID, RegisterID, RegisterID, RegisterID);
        MacroAssembler::Call callOperation(V_JITOperation_EIdJZ, const Identifier*, RegisterID, RegisterID, int32_t);
        MacroAssembler::Call callOperation(V_JITOperation_EJ, RegisterID, RegisterID);
        MacroAssembler::Call callOperation(V_JITOperation_EJJJ, RegisterID, RegisterID, RegisterID, RegisterID, RegisterID, RegisterID);
        MacroAssembler::Call callOperation(V_JITOperation_EJZ, RegisterID, RegisterID, int32_t);
        MacroAssembler::Call callOperation(V_JITOperation_EJZJ, RegisterID, RegisterID, int32_t, RegisterID, RegisterID);
#endif
        // Emits a branch that is taken when 'reg' does not hold a cell with
        // the given Structure.
        Jump checkStructure(RegisterID reg, Structure* structure);

        // Publishes the current frame — presumably to vm.topCallFrame;
        // confirm in JIT.cpp.
        void updateTopCallFrame();

        // Emits a bare call with no exception check; the target may be left
        // empty and patched later.
        Call emitNakedCall(CodePtr function = CodePtr());

        // Loads the character value of a single character string into dst.
        void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);

#if ENABLE(DFG_JIT)
        void emitEnterOptimizationCheck();
#else
        // Without the DFG there is no higher tier to enter, so this is a no-op.
        void emitEnterOptimizationCheck() { }
#endif

#ifndef NDEBUG
        // Debug-only helper for dumping the types of two bytecode operands.
        void printBytecodeOperandTypes(int src1, int src2);
#endif
        // Optional sampling/diagnostic hooks; each group compiles to nothing
        // (or an empty inline stub) when its feature flag is off.
#if ENABLE(SAMPLING_FLAGS)
        void setSamplingFlag(int32_t);
        void clearSamplingFlag(int32_t);
#endif

#if ENABLE(SAMPLING_COUNTERS)
        void emitCount(AbstractSamplingCounter&, int32_t = 1);
#endif

#if ENABLE(OPCODE_SAMPLING)
        void sampleInstruction(Instruction*, bool = false);
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
        void sampleCodeBlock(CodeBlock*);
#else
        // Stub so call sites need no #if guards of their own.
        void sampleCodeBlock(CodeBlock*) {}
#endif
        // Tiering capability queries. With the DFG enabled these reflect the
        // per-CodeBlock flags computed at JIT construction; without it,
        // optimization is impossible but profiling may still be forced on.
#if ENABLE(DFG_JIT)
        bool canBeOptimized() { return m_canBeOptimized; }
        bool canBeOptimizedOrInlined() { return m_canBeOptimizedOrInlined; }
        bool shouldEmitProfiling() { return m_shouldEmitProfiling; }
#else
        bool canBeOptimized() { return false; }
        bool canBeOptimizedOrInlined() { return false; }
        // Enables use of value profiler with tiered compilation turned off,
        // in which case all code gets profiled.
        bool shouldEmitProfiling() { return false; }
#endif
        Interpreter* m_interpreter;

        // Records accumulated during code generation, consumed at link time.
        Vector<CallRecord> m_calls;
        Vector<Label> m_labels;
        Vector<JITGetByIdGenerator> m_getByIds;
        Vector<JITPutByIdGenerator> m_putByIds;
        Vector<ByValCompilationInfo> m_byValCompilationInfo;
        Vector<CallCompilationInfo> m_callCompilationInfo;
        Vector<JumpTable> m_jmpTable;

        // Offset of the bytecode instruction currently being compiled.
        unsigned m_bytecodeOffset;
        Vector<SlowCaseEntry> m_slowCases;
        Vector<SwitchRecord> m_switches;

        JumpList m_exceptionChecks;
        JumpList m_exceptionChecksWithCallFrameRollback;

        // Cursors into the generator/info vectors above, advanced while
        // linking slow paths.
        unsigned m_getByIdIndex;
        unsigned m_putByIdIndex;
        unsigned m_byValInstructionIndex;
        unsigned m_callLinkInfoIndex;

        OwnPtr<JITDisassembler> m_disassembler;
        RefPtr<Profiler::Compilation> m_compilation;
        WeakRandom m_randomGenerator;
        static CodeRef stringGetByValStubGenerator(VM*);

        // Flags backing the canBeOptimized*/shouldEmitProfiling accessors.
        bool m_canBeOptimized;
        bool m_canBeOptimizedOrInlined;
        bool m_shouldEmitProfiling;
ba379fdc 826 } JIT_CLASS_ALIGNMENT;
f9bf01c6 827
ba379fdc 828} // namespace JSC
9dae56ea
A
829
830#endif // ENABLE(JIT)
831
832#endif // JIT_h