/*
 * Copyright (C) 2008, 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JIT_h
#define JIT_h

#if ENABLE(JIT)

// We've run into some problems where changing the size of the class JIT leads to
// performance fluctuations. Try forcing alignment in an attempt to stabilize this.
#if COMPILER(GCC)
#define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
#else
#define JIT_CLASS_ALIGNMENT
#endif

#define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
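// Illustrative use only (hypothetical label and constant names): this macro is
// meant to verify that a hand-computed patch offset matches the code the
// assembler actually emitted, e.g.
//     ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureCheck), patchOffsetGetByIdStructure);
// where differenceBetween() is the MacroAssembler distance helper and the
// expected value is whatever constant the patching code assumes.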

#include "CodeBlock.h"
#include "CompactJITCodeMap.h"
#include "Interpreter.h"
#include "JITDisassembler.h"
#include "JITInlineCacheGenerator.h"
#include "JSInterfaceJIT.h"
#include "LegacyProfiler.h"
#include "Opcode.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include "UnusedPointer.h"

namespace JSC {

class ArrayAllocationProfile;
class CodeBlock;
class FunctionExecutable;
class JIT;
class JSPropertyNameIterator;
class Identifier;
class Interpreter;
class JSScope;
class JSStack;
class MarkedAllocator;
class Register;
class StructureChain;

struct CallLinkInfo;
struct Instruction;
struct OperandTypes;
struct PolymorphicAccessStructureList;
struct SimpleJumpTable;
struct StringJumpTable;
struct StructureStubInfo;

struct CallRecord {
    MacroAssembler::Call from;
    unsigned bytecodeOffset;
    void* to;

    CallRecord()
    {
    }

    CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
        : from(from)
        , bytecodeOffset(bytecodeOffset)
        , to(to)
    {
    }
};
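// Note: a 'to' of 0 (the default above) records a call whose target is not yet
// known at emit time; such records are presumably bound later, during linking
// or patching, rather than by the generic link loop. (Inference from the
// default argument, not stated explicitly in this header.)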

struct JumpTable {
    MacroAssembler::Jump from;
    unsigned toBytecodeOffset;

    JumpTable(MacroAssembler::Jump f, unsigned t)
        : from(f)
        , toBytecodeOffset(t)
    {
    }
};

struct SlowCaseEntry {
    MacroAssembler::Jump from;
    unsigned to;
    unsigned hint;

    SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
        : from(f)
        , to(t)
        , hint(h)
    {
    }
};

struct SwitchRecord {
    enum Type {
        Immediate,
        Character,
        String
    };

    Type type;

    union {
        SimpleJumpTable* simpleJumpTable;
        StringJumpTable* stringJumpTable;
    } jumpTable;

    unsigned bytecodeOffset;
    unsigned defaultOffset;

    SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
        : type(type)
        , bytecodeOffset(bytecodeOffset)
        , defaultOffset(defaultOffset)
    {
        this->jumpTable.simpleJumpTable = jumpTable;
    }

    SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
        : type(String)
        , bytecodeOffset(bytecodeOffset)
        , defaultOffset(defaultOffset)
    {
        this->jumpTable.stringJumpTable = jumpTable;
    }
};

struct ByValCompilationInfo {
    ByValCompilationInfo() { }

    ByValCompilationInfo(unsigned bytecodeIndex, MacroAssembler::PatchableJump badTypeJump, JITArrayMode arrayMode, MacroAssembler::Label doneTarget)
        : bytecodeIndex(bytecodeIndex)
        , badTypeJump(badTypeJump)
        , arrayMode(arrayMode)
        , doneTarget(doneTarget)
    {
    }

    unsigned bytecodeIndex;
    MacroAssembler::PatchableJump badTypeJump;
    JITArrayMode arrayMode;
    MacroAssembler::Label doneTarget;
    MacroAssembler::Label slowPathTarget;
    MacroAssembler::Call returnAddress;
};

struct CallCompilationInfo {
    MacroAssembler::DataLabelPtr hotPathBegin;
    MacroAssembler::Call hotPathOther;
    MacroAssembler::Call callReturnLocation;
    CallLinkInfo* callLinkInfo;
};

// Near calls can only be patched to other JIT code; regular calls can be patched to JIT code or relinked to stub functions.
void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);

class JIT : private JSInterfaceJIT {
    friend class JITSlowPathCall;
    friend class JITStubCall;

    using MacroAssembler::Jump;
    using MacroAssembler::JumpList;
    using MacroAssembler::Label;

    static const uintptr_t patchGetByIdDefaultStructure = unusedPointer;
    static const int patchGetByIdDefaultOffset = 0;
    // Magic number - the initial offset must not be representable as a signed 8-bit value,
    // or the X86Assembler will compress the displacement, and we may not be able to fit
    // a patched offset.
    static const int patchPutByIdDefaultOffset = 256;
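    // (For reference: x86 encodes displacements in [-128, 127] as a single
    // disp8 byte; choosing 256 guarantees the assembler emits a full 32-bit
    // displacement, leaving room for any offset patched in later.)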

public:
    static CompilationResult compile(VM* vm, CodeBlock* codeBlock, JITCompilationEffort effort)
    {
        return JIT(vm, codeBlock).privateCompile(effort);
    }

    static void compileClosureCall(VM* vm, CallLinkInfo* callLinkInfo, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
    {
        JIT jit(vm, callerCodeBlock);
        jit.m_bytecodeOffset = callLinkInfo->codeOrigin.bytecodeIndex;
        jit.privateCompileClosureCall(callLinkInfo, calleeCodeBlock, expectedStructure, expectedExecutable, codePtr);
    }

    static void compileGetByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
    {
        JIT jit(vm, codeBlock);
        jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
        jit.privateCompileGetByVal(byValInfo, returnAddress, arrayMode);
    }

    static void compilePutByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
    {
        JIT jit(vm, codeBlock);
        jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
        jit.privateCompilePutByVal(byValInfo, returnAddress, arrayMode);
    }

    static void compileDirectPutByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
    {
        JIT jit(vm, codeBlock);
        jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
        jit.privateCompilePutByVal(byValInfo, returnAddress, arrayMode);
    }

    static CodeRef compileCTINativeCall(VM* vm, NativeFunction func)
    {
        if (!vm->canUseJIT())
            return CodeRef::createLLIntCodeRef(llint_native_call_trampoline);
        JIT jit(vm, 0);
        return jit.privateCompileCTINativeCall(vm, func);
    }

    static unsigned frameRegisterCountFor(CodeBlock*);
    static int stackPointerOffsetFor(CodeBlock*);

private:
    JIT(VM*, CodeBlock* = 0);

    void privateCompileMainPass();
    void privateCompileLinkPass();
    void privateCompileSlowCases();
    CompilationResult privateCompile(JITCompilationEffort);

    void privateCompileClosureCall(CallLinkInfo*, CodeBlock* calleeCodeBlock, Structure*, ExecutableBase*, MacroAssemblerCodePtr);

    void privateCompileGetByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);
    void privateCompilePutByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);

    Label privateCompileCTINativeCall(VM*, bool isConstruct = false);
    CodeRef privateCompileCTINativeCall(VM*, NativeFunction);
    void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);

    // Add a call out from JIT code, without an exception check.
    Call appendCall(const FunctionPtr& function)
    {
        Call functionCall = call();
        m_calls.append(CallRecord(functionCall, m_bytecodeOffset, function.value()));
        return functionCall;
    }
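    // Sketch of the contract implied above: the records accumulated in m_calls
    // are walked once code generation finishes, and every record with a
    // non-null target is bound to that function pointer at the code's final
    // address (the link pass itself lives outside this header).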

#if OS(WINDOWS) && CPU(X86_64)
    Call appendCallWithSlowPathReturnType(const FunctionPtr& function)
    {
        Call functionCall = callWithSlowPathReturnType();
        m_calls.append(CallRecord(functionCall, m_bytecodeOffset, function.value()));
        return functionCall;
    }
#endif

    void exceptionCheck(Jump jumpToHandler)
    {
        m_exceptionChecks.append(jumpToHandler);
    }

    void exceptionCheck()
    {
        m_exceptionChecks.append(emitExceptionCheck());
    }

    void exceptionCheckWithCallFrameRollback()
    {
        m_exceptionChecksWithCallFrameRollback.append(emitExceptionCheck());
    }

    void privateCompileExceptionHandlers();

    void addSlowCase(Jump);
    void addSlowCase(JumpList);
    void addSlowCase();
    void addJump(Jump, int);
    void emitJumpSlowToHot(Jump, int);

    void compileOpCall(OpcodeID, Instruction*, unsigned callLinkInfoIndex);
    void compileOpCallSlowCase(OpcodeID, Instruction*, Vector<SlowCaseEntry>::iterator&, unsigned callLinkInfoIndex);
    void compileLoadVarargs(Instruction*);
    void compileCallEval(Instruction*);
    void compileCallEvalSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitPutCallResult(Instruction*);

    enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
    void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
    bool isOperandConstantImmediateDouble(int src);

    void emitLoadDouble(int index, FPRegisterID value);
    void emitLoadInt32ToDouble(int index, FPRegisterID value);
    Jump emitJumpIfCellNotObject(RegisterID cellReg);

    enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterBase, ShouldFilterValue, ShouldFilterBaseAndValue };
    // The value register in a write barrier is used before any scratch registers,
    // so it may safely be the same as either of the scratch registers.
    void emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode);
    void emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode);
    void emitWriteBarrier(JSCell* owner);

    template<typename StructureType> // StructureType can be RegisterID or ImmPtr.
    void emitAllocateJSObject(RegisterID allocator, StructureType, RegisterID result, RegisterID scratch);

    // This assumes that the value to profile is in regT0 and that regT3 is available for
    // scratch.
    void emitValueProfilingSite(ValueProfile*);
    void emitValueProfilingSite(unsigned bytecodeOffset);
    void emitValueProfilingSite();
    void emitArrayProfilingSiteWithCell(RegisterID cell, RegisterID indexingType, ArrayProfile*);
    void emitArrayProfilingSiteForBytecodeIndexWithCell(RegisterID cell, RegisterID indexingType, unsigned bytecodeIndex);
    void emitArrayProfileStoreToHoleSpecialCase(ArrayProfile*);
    void emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile*);

    JITArrayMode chooseArrayMode(ArrayProfile*);

    // Property is in regT1, base is in regT0. regT2 contains indexing type.
    // Property is int-checked and zero extended. Base is cell checked.
    // Structure is already profiled. Returns the slow cases. Fall-through
    // case contains result in regT0, and it is not yet profiled.
    JumpList emitInt32GetByVal(Instruction* instruction, PatchableJump& badType) { return emitContiguousGetByVal(instruction, badType, Int32Shape); }
    JumpList emitDoubleGetByVal(Instruction*, PatchableJump& badType);
    JumpList emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape = ContiguousShape);
    JumpList emitArrayStorageGetByVal(Instruction*, PatchableJump& badType);
    JumpList emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType);
    JumpList emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType);
    // Property is in regT1, base is in regT0. regT2 contains the indexing type.
    // The value to store is not yet loaded. Property is int-checked and
    // zero-extended. Base is cell checked. Structure is already profiled.
    // Returns the slow cases.
    JumpList emitInt32PutByVal(Instruction* currentInstruction, PatchableJump& badType)
    {
        return emitGenericContiguousPutByVal(currentInstruction, badType, Int32Shape);
    }
    JumpList emitDoublePutByVal(Instruction* currentInstruction, PatchableJump& badType)
    {
        return emitGenericContiguousPutByVal(currentInstruction, badType, DoubleShape);
    }
    JumpList emitContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType)
    {
        return emitGenericContiguousPutByVal(currentInstruction, badType);
    }
    JumpList emitGenericContiguousPutByVal(Instruction*, PatchableJump& badType, IndexingType indexingShape = ContiguousShape);
    JumpList emitArrayStoragePutByVal(Instruction*, PatchableJump& badType);
    JumpList emitIntTypedArrayPutByVal(Instruction*, PatchableJump& badType, TypedArrayType);
    JumpList emitFloatTypedArrayPutByVal(Instruction*, PatchableJump& badType, TypedArrayType);

    enum FinalObjectMode { MayBeFinal, KnownNotFinal };

    template <typename T> Jump branchStructure(RelationalCondition, T leftHandSide, Structure*);

#if USE(JSVALUE32_64)
    bool getOperandConstantImmediateInt(int op1, int op2, int& op, int32_t& constant);

    void emitLoadTag(int index, RegisterID tag);
    void emitLoadPayload(int index, RegisterID payload);

    void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
    void emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
    void emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2);

    void emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
    void emitStore(int index, const JSValue constant, RegisterID base = callFrameRegister);
    void emitStoreInt32(int index, RegisterID payload, bool indexIsInt32 = false);
    void emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32 = false);
    void emitStoreCell(int index, RegisterID payload, bool indexIsCell = false);
    void emitStoreBool(int index, RegisterID payload, bool indexIsBool = false);
    void emitStoreDouble(int index, FPRegisterID value);

    void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex);
    void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag);

    void compileGetByIdHotPath(const Identifier*);
    void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset);
    void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset);
    void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode = MayBeFinal);
    void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset);

    // Arithmetic opcode helpers
    void emitAdd32Constant(int dst, int op, int32_t constant, ResultType opType);
    void emitSub32Constant(int dst, int op, int32_t constant, ResultType opType);
    void emitBinaryDoubleOp(OpcodeID, int dst, int op1, int op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);

#else // USE(JSVALUE32_64)
    void emitGetVirtualRegister(int src, RegisterID dst);
    void emitGetVirtualRegister(VirtualRegister src, RegisterID dst);
    void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
    void emitGetVirtualRegisters(VirtualRegister src1, RegisterID dst1, VirtualRegister src2, RegisterID dst2);
    void emitPutVirtualRegister(int dst, RegisterID from = regT0);
    void emitPutVirtualRegister(VirtualRegister dst, RegisterID from = regT0);
    void emitStoreCell(int dst, RegisterID payload, bool /* only used in JSValue32_64 */ = false)
    {
        emitPutVirtualRegister(dst, payload);
    }
    void emitStoreCell(VirtualRegister dst, RegisterID payload)
    {
        emitPutVirtualRegister(dst, payload);
    }

    int32_t getConstantOperandImmediateInt(int src);

    Jump emitJumpIfJSCell(RegisterID);
    Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
    void emitJumpSlowCaseIfJSCell(RegisterID);
    void emitJumpSlowCaseIfNotJSCell(RegisterID);
    void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
    Jump emitJumpIfImmediateInteger(RegisterID);
    Jump emitJumpIfNotImmediateInteger(RegisterID);
    Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
    void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
    void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
    void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);

    void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);

    void emitTagAsBoolImmediate(RegisterID reg);
    void compileBinaryArithOp(OpcodeID, int dst, int src1, int src2, OperandTypes opi);
    void compileBinaryArithOpSlowCase(Instruction*, OpcodeID, Vector<SlowCaseEntry>::iterator&, int dst, int src1, int src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);

    void compileGetByIdHotPath(int baseVReg, const Identifier*);
    void compileGetDirectOffset(RegisterID base, RegisterID result, PropertyOffset cachedOffset);
    void compileGetDirectOffset(JSObject* base, RegisterID result, PropertyOffset cachedOffset);
    void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch, FinalObjectMode = MayBeFinal);
    void compilePutDirectOffset(RegisterID base, RegisterID value, PropertyOffset cachedOffset);

#endif // USE(JSVALUE32_64)

    void emit_compareAndJump(OpcodeID, int op1, int op2, unsigned target, RelationalCondition);
    void emit_compareAndJumpSlow(int op1, int op2, unsigned target, DoubleCondition, size_t (JIT_OPERATION *operation)(ExecState*, EncodedJSValue, EncodedJSValue), bool invert, Vector<SlowCaseEntry>::iterator&);

    void emit_op_touch_entry(Instruction*);
    void emit_op_add(Instruction*);
    void emit_op_bitand(Instruction*);
    void emit_op_bitor(Instruction*);
    void emit_op_bitxor(Instruction*);
    void emit_op_call(Instruction*);
    void emit_op_call_eval(Instruction*);
    void emit_op_call_varargs(Instruction*);
    void emit_op_construct_varargs(Instruction*);
    void emit_op_captured_mov(Instruction*);
    void emit_op_catch(Instruction*);
    void emit_op_construct(Instruction*);
    void emit_op_get_callee(Instruction*);
    void emit_op_create_this(Instruction*);
    void emit_op_to_this(Instruction*);
    void emit_op_create_arguments(Instruction*);
    void emit_op_debug(Instruction*);
    void emit_op_del_by_id(Instruction*);
    void emit_op_div(Instruction*);
    void emit_op_end(Instruction*);
    void emit_op_enter(Instruction*);
    void emit_op_create_activation(Instruction*);
    void emit_op_eq(Instruction*);
    void emit_op_eq_null(Instruction*);
    void emit_op_get_by_id(Instruction*);
    void emit_op_get_arguments_length(Instruction*);
    void emit_op_get_by_val(Instruction*);
    void emit_op_get_argument_by_val(Instruction*);
    void emit_op_get_by_pname(Instruction*);
    void emit_op_init_lazy_reg(Instruction*);
    void emit_op_check_has_instance(Instruction*);
    void emit_op_instanceof(Instruction*);
    void emit_op_is_undefined(Instruction*);
    void emit_op_is_boolean(Instruction*);
    void emit_op_is_number(Instruction*);
    void emit_op_is_string(Instruction*);
    void emit_op_jeq_null(Instruction*);
    void emit_op_jfalse(Instruction*);
    void emit_op_jmp(Instruction*);
    void emit_op_jneq_null(Instruction*);
    void emit_op_jneq_ptr(Instruction*);
    void emit_op_jless(Instruction*);
    void emit_op_jlesseq(Instruction*);
    void emit_op_jgreater(Instruction*);
    void emit_op_jgreatereq(Instruction*);
    void emit_op_jnless(Instruction*);
    void emit_op_jnlesseq(Instruction*);
    void emit_op_jngreater(Instruction*);
    void emit_op_jngreatereq(Instruction*);
    void emit_op_jtrue(Instruction*);
    void emit_op_loop_hint(Instruction*);
    void emit_op_lshift(Instruction*);
    void emit_op_mod(Instruction*);
    void emit_op_mov(Instruction*);
    void emit_op_mul(Instruction*);
    void emit_op_negate(Instruction*);
    void emit_op_neq(Instruction*);
    void emit_op_neq_null(Instruction*);
    void emit_op_new_array(Instruction*);
    void emit_op_new_array_with_size(Instruction*);
    void emit_op_new_array_buffer(Instruction*);
    void emit_op_new_func(Instruction*);
    void emit_op_new_captured_func(Instruction*);
    void emit_op_new_func_exp(Instruction*);
    void emit_op_new_object(Instruction*);
    void emit_op_new_regexp(Instruction*);
    void emit_op_get_pnames(Instruction*);
    void emit_op_next_pname(Instruction*);
    void emit_op_not(Instruction*);
    void emit_op_nstricteq(Instruction*);
    void emit_op_pop_scope(Instruction*);
    void emit_op_dec(Instruction*);
    void emit_op_inc(Instruction*);
    void emit_op_profile_did_call(Instruction*);
    void emit_op_profile_will_call(Instruction*);
    void emit_op_push_name_scope(Instruction*);
    void emit_op_push_with_scope(Instruction*);
    void emit_op_put_by_id(Instruction*);
    void emit_op_put_by_index(Instruction*);
    void emit_op_put_by_val(Instruction*);
    void emit_op_put_getter_setter(Instruction*);
    void emit_op_init_global_const(Instruction*);
    void emit_op_ret(Instruction*);
    void emit_op_ret_object_or_this(Instruction*);
    void emit_op_rshift(Instruction*);
    void emit_op_strcat(Instruction*);
    void emit_op_stricteq(Instruction*);
    void emit_op_sub(Instruction*);
    void emit_op_switch_char(Instruction*);
    void emit_op_switch_imm(Instruction*);
    void emit_op_switch_string(Instruction*);
    void emit_op_tear_off_activation(Instruction*);
    void emit_op_tear_off_arguments(Instruction*);
    void emit_op_throw(Instruction*);
    void emit_op_throw_static_error(Instruction*);
    void emit_op_to_number(Instruction*);
    void emit_op_to_primitive(Instruction*);
    void emit_op_unexpected_load(Instruction*);
    void emit_op_unsigned(Instruction*);
    void emit_op_urshift(Instruction*);

    void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_construct_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_captured_mov(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_to_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_create_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_get_callee(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_check_has_instance(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jgreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jgreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jngreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jngreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_new_object(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_to_number(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_unsigned(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);

    void emit_op_resolve_scope(Instruction*);
    void emit_op_get_from_scope(Instruction*);
    void emit_op_put_to_scope(Instruction*);
    void emitSlow_op_resolve_scope(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_get_from_scope(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_put_to_scope(Instruction*, Vector<SlowCaseEntry>::iterator&);

    void emitRightShift(Instruction*, bool isUnsigned);
    void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);

    void emitVarInjectionCheck(bool needsVarInjectionChecks);
    void emitResolveClosure(int dst, bool needsVarInjectionChecks, unsigned depth);
    void emitLoadWithStructureCheck(int scope, Structure** structureSlot);
    void emitGetGlobalProperty(uintptr_t* operandSlot);
    void emitGetGlobalVar(uintptr_t operand);
    void emitGetClosureVar(int scope, uintptr_t operand);
    void emitPutGlobalProperty(uintptr_t* operandSlot, int value);
#if USE(JSVALUE64)
    void emitNotifyWrite(RegisterID value, RegisterID scratch, VariableWatchpointSet*);
#else
    void emitNotifyWrite(RegisterID tag, RegisterID payload, RegisterID scratch, VariableWatchpointSet*);
#endif
    void emitPutGlobalVar(uintptr_t operand, int value, VariableWatchpointSet*);
    void emitPutClosureVar(int scope, uintptr_t operand, int value);

    void emitInitRegister(int dst);

    void emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry);
    void emitGetFromCallFrameHeaderPtr(JSStack::CallFrameHeaderEntry, RegisterID to, RegisterID from = callFrameRegister);
    void emitGetFromCallFrameHeader32(JSStack::CallFrameHeaderEntry, RegisterID to, RegisterID from = callFrameRegister);
#if USE(JSVALUE64)
    void emitGetFromCallFrameHeader64(JSStack::CallFrameHeaderEntry, RegisterID to, RegisterID from = callFrameRegister);
#endif

    JSValue getConstantOperand(int src);
    bool isOperandConstantImmediateInt(int src);
    bool isOperandConstantImmediateChar(int src);

    Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
    {
        return iter++->from;
    }
    void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
    {
        iter->from.link(this);
        ++iter;
    }
    void linkDummySlowCase(Vector<SlowCaseEntry>::iterator& iter)
    {
        ASSERT(!iter->from.isSet());
        ++iter;
    }
    void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int virtualRegisterIndex);
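    // Slow-case protocol, as implied by the helpers above: the main pass
    // appends SlowCaseEntry records in bytecode order via addSlowCase(), and
    // each emitSlow_op_* handler is expected to consume its entries in that
    // same order through this iterator interface, keeping the two passes in
    // lockstep.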

    MacroAssembler::Call appendCallWithExceptionCheck(const FunctionPtr&);
#if OS(WINDOWS) && CPU(X86_64)
    MacroAssembler::Call appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr&);
#endif
    MacroAssembler::Call appendCallWithCallFrameRollbackOnException(const FunctionPtr&);
    MacroAssembler::Call appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr&, int);
    MacroAssembler::Call appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr&, int);

    enum WithProfileTag { WithProfile };

    MacroAssembler::Call callOperation(C_JITOperation_E);
    MacroAssembler::Call callOperation(C_JITOperation_EO, GPRReg);
    MacroAssembler::Call callOperation(C_JITOperation_ESt, Structure*);
    MacroAssembler::Call callOperation(C_JITOperation_EZ, int32_t);
    MacroAssembler::Call callOperation(F_JITOperation_EJZZ, GPRReg, int32_t, int32_t);
    MacroAssembler::Call callOperation(J_JITOperation_E, int);
    MacroAssembler::Call callOperation(J_JITOperation_EAapJ, int, ArrayAllocationProfile*, GPRReg);
    MacroAssembler::Call callOperation(J_JITOperation_EAapJcpZ, int, ArrayAllocationProfile*, GPRReg, int32_t);
    MacroAssembler::Call callOperation(J_JITOperation_EAapJcpZ, int, ArrayAllocationProfile*, const JSValue*, int32_t);
    MacroAssembler::Call callOperation(J_JITOperation_EC, int, JSCell*);
    MacroAssembler::Call callOperation(V_JITOperation_EC, JSCell*);
    MacroAssembler::Call callOperation(J_JITOperation_EJ, int, GPRReg);
#if USE(JSVALUE64)
    MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_ESsiJI, int, StructureStubInfo*, GPRReg, StringImpl*);
#else
    MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_ESsiJI, int, StructureStubInfo*, GPRReg, GPRReg, StringImpl*);
#endif
    MacroAssembler::Call callOperation(J_JITOperation_EJIdc, int, GPRReg, const Identifier*);
    MacroAssembler::Call callOperation(J_JITOperation_EJJ, int, GPRReg, GPRReg);
#if USE(JSVALUE64)
    MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_EJJ, int, GPRReg, GPRReg);
#else
    MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_EJJ, int, GPRReg, GPRReg, GPRReg, GPRReg);
#endif
    MacroAssembler::Call callOperation(J_JITOperation_EP, int, void*);
    MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_EPc, int, Instruction*);
    MacroAssembler::Call callOperation(J_JITOperation_EZ, int, int32_t);
    MacroAssembler::Call callOperation(P_JITOperation_EJS, GPRReg, size_t);
    MacroAssembler::Call callOperation(S_JITOperation_ECC, RegisterID, RegisterID);
    MacroAssembler::Call callOperation(S_JITOperation_EJ, RegisterID);
    MacroAssembler::Call callOperation(S_JITOperation_EJJ, RegisterID, RegisterID);
    MacroAssembler::Call callOperation(S_JITOperation_EOJss, RegisterID, RegisterID);
    MacroAssembler::Call callOperation(Sprt_JITOperation_EZ, int32_t);
    MacroAssembler::Call callOperation(V_JITOperation_E);
    MacroAssembler::Call callOperation(V_JITOperation_EC, RegisterID);
    MacroAssembler::Call callOperation(V_JITOperation_ECC, RegisterID, RegisterID);
    MacroAssembler::Call callOperation(V_JITOperation_ECICC, RegisterID, const Identifier*, RegisterID, RegisterID);
    MacroAssembler::Call callOperation(V_JITOperation_EIdJZ, const Identifier*, RegisterID, int32_t);
    MacroAssembler::Call callOperation(V_JITOperation_EJ, RegisterID);
#if USE(JSVALUE64)
    MacroAssembler::Call callOperationNoExceptionCheck(V_JITOperation_EJ, RegisterID);
#else
    MacroAssembler::Call callOperationNoExceptionCheck(V_JITOperation_EJ, RegisterID, RegisterID);
#endif
    MacroAssembler::Call callOperation(V_JITOperation_EJIdJJ, RegisterID, const Identifier*, RegisterID, RegisterID);
#if USE(JSVALUE64)
    MacroAssembler::Call callOperation(F_JITOperation_EFJJZ, RegisterID, RegisterID, RegisterID, int32_t);
    MacroAssembler::Call callOperation(V_JITOperation_ESsiJJI, StructureStubInfo*, RegisterID, RegisterID, StringImpl*);
#else
    MacroAssembler::Call callOperation(V_JITOperation_ESsiJJI, StructureStubInfo*, RegisterID, RegisterID, RegisterID, RegisterID, StringImpl*);
#endif
    MacroAssembler::Call callOperation(V_JITOperation_EJJJ, RegisterID, RegisterID, RegisterID);
    MacroAssembler::Call callOperation(V_JITOperation_EJZJ, RegisterID, int32_t, RegisterID);
    MacroAssembler::Call callOperation(V_JITOperation_EJZ, RegisterID, int32_t);
    MacroAssembler::Call callOperation(V_JITOperation_EPc, Instruction*);
    MacroAssembler::Call callOperation(V_JITOperation_EZ, int32_t);
    MacroAssembler::Call callOperationWithCallFrameRollbackOnException(J_JITOperation_E);
    MacroAssembler::Call callOperationNoExceptionCheck(J_JITOperation_EE, RegisterID);
    MacroAssembler::Call callOperationWithCallFrameRollbackOnException(V_JITOperation_ECb, CodeBlock*);
    MacroAssembler::Call callOperationWithCallFrameRollbackOnException(Z_JITOperation_E);
#if USE(JSVALUE32_64)
    MacroAssembler::Call callOperation(F_JITOperation_EFJJZ, RegisterID, RegisterID, RegisterID, RegisterID, RegisterID, int32_t);
    MacroAssembler::Call callOperation(F_JITOperation_EJZZ, GPRReg, GPRReg, int32_t, int32_t);
    MacroAssembler::Call callOperation(J_JITOperation_EAapJ, int, ArrayAllocationProfile*, GPRReg, GPRReg);
    MacroAssembler::Call callOperation(J_JITOperation_EJ, int, GPRReg, GPRReg);
    MacroAssembler::Call callOperation(J_JITOperation_EJIdc, int, GPRReg, GPRReg, const Identifier*);
    MacroAssembler::Call callOperation(J_JITOperation_EJJ, int, GPRReg, GPRReg, GPRReg, GPRReg);
    MacroAssembler::Call callOperation(P_JITOperation_EJS, GPRReg, GPRReg, size_t);
    MacroAssembler::Call callOperation(S_JITOperation_EJ, RegisterID, RegisterID);
    MacroAssembler::Call callOperation(S_JITOperation_EJJ, RegisterID, RegisterID, RegisterID, RegisterID);
    MacroAssembler::Call callOperation(V_JITOperation_EIdJZ, const Identifier*, RegisterID, RegisterID, int32_t);
    MacroAssembler::Call callOperation(V_JITOperation_EJ, RegisterID, RegisterID);
    MacroAssembler::Call callOperation(V_JITOperation_EJJJ, RegisterID, RegisterID, RegisterID, RegisterID, RegisterID, RegisterID);
    MacroAssembler::Call callOperation(V_JITOperation_EJZ, RegisterID, RegisterID, int32_t);
    MacroAssembler::Call callOperation(V_JITOperation_EJZJ, RegisterID, RegisterID, int32_t, RegisterID, RegisterID);
#endif

    Jump checkStructure(RegisterID reg, Structure* structure);

    void updateTopCallFrame();

    Call emitNakedCall(CodePtr function = CodePtr());

    // Loads the character value of a single character string into dst.
    void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);

#if ENABLE(DFG_JIT)
    void emitEnterOptimizationCheck();
#else
    void emitEnterOptimizationCheck() { }
#endif

#ifndef NDEBUG
    void printBytecodeOperandTypes(int src1, int src2);
#endif

#if ENABLE(SAMPLING_FLAGS)
    void setSamplingFlag(int32_t);
    void clearSamplingFlag(int32_t);
#endif

#if ENABLE(SAMPLING_COUNTERS)
    void emitCount(AbstractSamplingCounter&, int32_t = 1);
#endif

#if ENABLE(OPCODE_SAMPLING)
    void sampleInstruction(Instruction*, bool = false);
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
    void sampleCodeBlock(CodeBlock*);
#else
    void sampleCodeBlock(CodeBlock*) { }
#endif

#if ENABLE(DFG_JIT)
    bool canBeOptimized() { return m_canBeOptimized; }
    bool canBeOptimizedOrInlined() { return m_canBeOptimizedOrInlined; }
    bool shouldEmitProfiling() { return m_shouldEmitProfiling; }
#else
    bool canBeOptimized() { return false; }
    bool canBeOptimizedOrInlined() { return false; }
    // Enables use of value profiler with tiered compilation turned off,
    // in which case all code gets profiled.
    bool shouldEmitProfiling() { return false; }
#endif

    Interpreter* m_interpreter;

    Vector<CallRecord> m_calls;
    Vector<Label> m_labels;
    Vector<JITGetByIdGenerator> m_getByIds;
    Vector<JITPutByIdGenerator> m_putByIds;
    Vector<ByValCompilationInfo> m_byValCompilationInfo;
    Vector<CallCompilationInfo> m_callCompilationInfo;
    Vector<JumpTable> m_jmpTable;

    unsigned m_bytecodeOffset;
    Vector<SlowCaseEntry> m_slowCases;
    Vector<SwitchRecord> m_switches;

    JumpList m_exceptionChecks;
    JumpList m_exceptionChecksWithCallFrameRollback;

    unsigned m_getByIdIndex;
    unsigned m_putByIdIndex;
    unsigned m_byValInstructionIndex;
    unsigned m_callLinkInfoIndex;

    OwnPtr<JITDisassembler> m_disassembler;
    RefPtr<Profiler::Compilation> m_compilation;
    WeakRandom m_randomGenerator;
    static CodeRef stringGetByValStubGenerator(VM*);

    bool m_canBeOptimized;
    bool m_canBeOptimizedOrInlined;
    bool m_shouldEmitProfiling;
} JIT_CLASS_ALIGNMENT;

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JIT_h