// jit/JIT.h — apple/javascriptcore (JavaScriptCore-521). Note: this copy was
// extracted from a gitweb view; original repository header follows.
1 /*
2 * Copyright (C) 2008 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #ifndef JIT_h
27 #define JIT_h
28
29 #include <wtf/Platform.h>
30 #include <bytecode/SamplingTool.h>
31
32 #if ENABLE(JIT)
33
34 #define WTF_USE_CTI_REPATCH_PIC 1
35
36 #include "Interpreter.h"
37 #include "Opcode.h"
38 #include "RegisterFile.h"
39 #include "MacroAssembler.h"
40 #include "Profiler.h"
41 #include <wtf/AlwaysInline.h>
42 #include <wtf/Vector.h>
43
// Offset (in pointer-sized slots) from the ARGS pointer to the first fixed
// trampoline argument.  The value differs per platform — presumably it
// accounts for the return address and callee-saved registers pushed by
// ctiTrampoline (TODO: confirm against the trampoline implementation).
#if PLATFORM(X86_64)
#define STUB_ARGS_offset 0x10
#else
#define STUB_ARGS_offset 0x0C
#endif

// Slot indices of the fixed arguments passed to ctiTrampoline
// (code, registerFile, callFrame, exception, profiler, globalData).
#define STUB_ARGS_code (STUB_ARGS_offset)
#define STUB_ARGS_registerFile (STUB_ARGS_offset + 1)
#define STUB_ARGS_callFrame (STUB_ARGS_offset + 2)
#define STUB_ARGS_exception (STUB_ARGS_offset + 3)
#define STUB_ARGS_profilerReference (STUB_ARGS_offset + 4)
#define STUB_ARGS_globalData (STUB_ARGS_offset + 5)

// Typed accessors for the fixed trampoline arguments, usable inside JIT
// stub functions where ARGS is in scope.
#define ARG_callFrame static_cast<CallFrame*>(ARGS[STUB_ARGS_callFrame])
#define ARG_registerFile static_cast<RegisterFile*>(ARGS[STUB_ARGS_registerFile])
#define ARG_exception static_cast<JSValuePtr*>(ARGS[STUB_ARGS_exception])
#define ARG_profilerReference static_cast<Profiler**>(ARGS[STUB_ARGS_profilerReference])
#define ARG_globalData static_cast<JSGlobalData*>(ARGS[STUB_ARGS_globalData])

// Overwrite the callFrame slot (used when a stub changes the active frame).
#define ARG_setCallFrame(newCallFrame) (ARGS[STUB_ARGS_callFrame] = (newCallFrame))

// Typed accessors for the per-stub operands, which occupy ARGS[1..6].
// ARG_srcN decodes a JSValuePtr; the others reinterpret the raw slot.
#define ARG_src1 JSValuePtr::decode(static_cast<JSValueEncodedAsPointer*>(ARGS[1]))
#define ARG_src2 JSValuePtr::decode(static_cast<JSValueEncodedAsPointer*>(ARGS[2]))
#define ARG_src3 JSValuePtr::decode(static_cast<JSValueEncodedAsPointer*>(ARGS[3]))
#define ARG_src4 JSValuePtr::decode(static_cast<JSValueEncodedAsPointer*>(ARGS[4]))
#define ARG_src5 JSValuePtr::decode(static_cast<JSValueEncodedAsPointer*>(ARGS[5]))
#define ARG_id1 static_cast<Identifier*>(ARGS[1])
#define ARG_id2 static_cast<Identifier*>(ARGS[2])
#define ARG_id3 static_cast<Identifier*>(ARGS[3])
#define ARG_id4 static_cast<Identifier*>(ARGS[4])
#define ARG_int1 static_cast<int32_t>(reinterpret_cast<intptr_t>(ARGS[1]))
#define ARG_int2 static_cast<int32_t>(reinterpret_cast<intptr_t>(ARGS[2]))
#define ARG_int3 static_cast<int32_t>(reinterpret_cast<intptr_t>(ARGS[3]))
#define ARG_int4 static_cast<int32_t>(reinterpret_cast<intptr_t>(ARGS[4]))
#define ARG_int5 static_cast<int32_t>(reinterpret_cast<intptr_t>(ARGS[5]))
#define ARG_int6 static_cast<int32_t>(reinterpret_cast<intptr_t>(ARGS[6]))
#define ARG_func1 static_cast<FuncDeclNode*>(ARGS[1])
#define ARG_funcexp1 static_cast<FuncExprNode*>(ARGS[1])
#define ARG_regexp1 static_cast<RegExp*>(ARGS[1])
#define ARG_pni1 static_cast<JSPropertyNameIterator*>(ARGS[1])
#define ARG_returnAddress2 static_cast<void*>(ARGS[2])
#define ARG_codeBlock4 static_cast<CodeBlock*>(ARGS[4])

// Slot holding the return address of the stub call, one slot below ARGS.
#define STUB_RETURN_ADDRESS_SLOT (ARGS[-1])
88
89 namespace JSC {
90
// Forward declarations — full definitions are not needed in this header.
class CodeBlock;
class JSPropertyNameIterator;
class Interpreter;
class Register;
class RegisterFile;
class ScopeChainNode;
class SimpleJumpTable;
class StringJumpTable;
class StructureChain;

struct CallLinkInfo;
struct Instruction;
struct OperandTypes;
struct PolymorphicAccessStructureList;
struct StructureStubInfo;

// Function-pointer types for the C ("CTI") helpers that JIT-generated code
// calls out to; one typedef per distinct return type (j = JSValue,
// o = object, p = property-name iterator, v = void, s = void*, b = int/bool,
// 2 = pair of pointers).
typedef JSValueEncodedAsPointer* (JIT_STUB *CTIHelper_j)(STUB_ARGS);
typedef JSObject* (JIT_STUB *CTIHelper_o)(STUB_ARGS);
typedef JSPropertyNameIterator* (JIT_STUB *CTIHelper_p)(STUB_ARGS);
typedef void (JIT_STUB *CTIHelper_v)(STUB_ARGS);
typedef void* (JIT_STUB *CTIHelper_s)(STUB_ARGS);
typedef int (JIT_STUB *CTIHelper_b)(STUB_ARGS);
typedef VoidPtrPair (JIT_STUB *CTIHelper_2)(STUB_ARGS);
114
115 struct CallRecord {
116 MacroAssembler::Jump from;
117 unsigned bytecodeIndex;
118 void* to;
119
120 CallRecord()
121 {
122 }
123
124 CallRecord(MacroAssembler::Jump from, unsigned bytecodeIndex, void* to = 0)
125 : from(from)
126 , bytecodeIndex(bytecodeIndex)
127 , to(to)
128 {
129 }
130 };
131
132 struct JumpTable {
133 MacroAssembler::Jump from;
134 unsigned toBytecodeIndex;
135
136 JumpTable(MacroAssembler::Jump f, unsigned t)
137 : from(f)
138 , toBytecodeIndex(t)
139 {
140 }
141 };
142
143 struct SlowCaseEntry {
144 MacroAssembler::Jump from;
145 unsigned to;
146 unsigned hint;
147
148 SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
149 : from(f)
150 , to(t)
151 , hint(h)
152 {
153 }
154 };
155
156 struct SwitchRecord {
157 enum Type {
158 Immediate,
159 Character,
160 String
161 };
162
163 Type type;
164
165 union {
166 SimpleJumpTable* simpleJumpTable;
167 StringJumpTable* stringJumpTable;
168 } jumpTable;
169
170 unsigned bytecodeIndex;
171 unsigned defaultOffset;
172
173 SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeIndex, unsigned defaultOffset, Type type)
174 : type(type)
175 , bytecodeIndex(bytecodeIndex)
176 , defaultOffset(defaultOffset)
177 {
178 this->jumpTable.simpleJumpTable = jumpTable;
179 }
180
181 SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeIndex, unsigned defaultOffset)
182 : type(String)
183 , bytecodeIndex(bytecodeIndex)
184 , defaultOffset(defaultOffset)
185 {
186 this->jumpTable.stringJumpTable = jumpTable;
187 }
188 };
189
// Labels captured while emitting a property-access sequence, kept so the
// access can later be located and patched.
struct PropertyStubCompilationInfo {
    MacroAssembler::Jump callReturnLocation; // the slow-case stub call for this access
    MacroAssembler::Label hotPathBegin;      // start of the patchable fast path
};
194
// Labels captured while emitting a call sequence, kept so the call link
// machinery can later patch the site (see linkCall/unlinkCall).
struct StructureStubCompilationInfo {
    MacroAssembler::DataLabelPtr hotPathBegin; // patchable pointer at the head of the fast path
    MacroAssembler::Jump hotPathOther;         // secondary fast-path branch
    MacroAssembler::Jump callReturnLocation;   // the slow-case stub call
    MacroAssembler::Label coldPathOther;       // entry to the cold (unlinked) path
};
201
extern "C" {
    // Assembly entry point from C++ into JIT-generated code: sets up the
    // stub-argument frame and jumps to 'code'; returns the encoded result.
    JSValueEncodedAsPointer* ctiTrampoline(
#if PLATFORM(X86_64)
        // FIXME: (bug #22910) this will force all arguments onto the stack (regparm(0) does not appear to have any effect).
        // We can allow register passing here, and move the writes of these values into the trampoline.
        void*, void*, void*, void*, void*, void*,
#endif
        void* code, RegisterFile*, CallFrame*, JSValuePtr* exception, Profiler**, JSGlobalData*);
    // Assembly routine entered when a stub needs to throw back into the VM.
    void ctiVMThrowTrampoline();
};

// Overwrite a return address slot / patch the target of a call instruction
// identified by its return address (used to swap in specialized stubs).
void ctiSetReturnAddress(void** where, void* what);
void ctiPatchCallByReturnAddress(void* where, void* what);
215
// The JIT translates a CodeBlock's bytecode into native x86/x86-64 machine
// code.  It derives privately from MacroAssembler, which supplies the
// instruction-emission primitives.  Each public static compile* entry point
// constructs a short-lived JIT instance and forwards to the matching
// privateCompile* member.
class JIT : private MacroAssembler {
    using MacroAssembler::Jump;
    using MacroAssembler::JumpList;
    using MacroAssembler::Label;

    // Registers with fixed roles in generated code.
#if PLATFORM(X86_64)
    static const RegisterID timeoutCheckRegister = X86::r12;
    static const RegisterID callFrameRegister = X86::r13;
    static const RegisterID tagTypeNumberRegister = X86::r14;
    static const RegisterID tagMaskRegister = X86::r15;
#else
    static const RegisterID timeoutCheckRegister = X86::esi;
    static const RegisterID callFrameRegister = X86::edi;
#endif

    // Placeholder values emitted into get_by_id fast paths; they are
    // overwritten when the access is patched for a concrete Structure.
    static const int patchGetByIdDefaultStructure = -1;
    // Magic number - initial offset cannot be representable as a signed 8bit value, or the X86Assembler
    // will compress the displacement, and we may not be able to fit a patched offset.
    static const int patchGetByIdDefaultOffset = 256;

    // Size contribution of the stub-argument setup code, which depends on
    // how stub arguments are passed (register / stack / va_list).
#if USE(JIT_STUB_ARGUMENT_REGISTER)
#if PLATFORM(X86_64)
    static const int ctiArgumentInitSize = 6;
#else
    static const int ctiArgumentInitSize = 2;
#endif
#elif USE(JIT_STUB_ARGUMENT_STACK)
    static const int ctiArgumentInitSize = 4;
#else // JIT_STUB_ARGUMENT_VA_LIST
    static const int ctiArgumentInitSize = 0;
#endif

    // Byte offsets of patchable fields within the emitted property-access
    // and call sequences.  These must exactly match the code laid down by
    // the corresponding emit routines, hence the per-architecture values;
    // do not change them without updating the emitters.
#if PLATFORM(X86_64)
    // These architecture specific value are used to enable patching - see comment on op_put_by_id.
    static const int patchOffsetPutByIdStructure = 10;
    static const int patchOffsetPutByIdPropertyMapOffset = 31;
    // These architecture specific value are used to enable patching - see comment on op_get_by_id.
    static const int patchOffsetGetByIdStructure = 10;
    static const int patchOffsetGetByIdBranchToSlowCase = 20;
    static const int patchOffsetGetByIdPropertyMapOffset = 31;
    static const int patchOffsetGetByIdPutResult = 31;
#if ENABLE(OPCODE_SAMPLING)
    static const int patchOffsetGetByIdSlowCaseCall = 53 + ctiArgumentInitSize;
#else
    static const int patchOffsetGetByIdSlowCaseCall = 30 + ctiArgumentInitSize;
#endif
    static const int patchOffsetOpCallCompareToJump = 9;
#else
    // These architecture specific value are used to enable patching - see comment on op_put_by_id.
    static const int patchOffsetPutByIdStructure = 7;
    static const int patchOffsetPutByIdPropertyMapOffset = 22;
    // These architecture specific value are used to enable patching - see comment on op_get_by_id.
    static const int patchOffsetGetByIdStructure = 7;
    static const int patchOffsetGetByIdBranchToSlowCase = 13;
    static const int patchOffsetGetByIdPropertyMapOffset = 22;
    static const int patchOffsetGetByIdPutResult = 22;
#if ENABLE(OPCODE_SAMPLING)
    static const int patchOffsetGetByIdSlowCaseCall = 31 + ctiArgumentInitSize;
#else
    static const int patchOffsetGetByIdSlowCaseCall = 21 + ctiArgumentInitSize;
#endif
    static const int patchOffsetOpCallCompareToJump = 6;
#endif

public:
    // Compile all of codeBlock to native code.
    static void compile(JSGlobalData* globalData, CodeBlock* codeBlock)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompile();
    }

    // The compileGetById*/compilePutById* entry points generate specialized
    // stub code for a property access against a particular Structure (or
    // prototype/chain of Structures), patched in at 'returnAddress'.
    static void compileGetByIdSelf(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, void* returnAddress)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompileGetByIdSelf(stubInfo, structure, cachedOffset, returnAddress);
    }

    static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, void* returnAddress)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, cachedOffset, returnAddress, callFrame);
    }

#if USE(CTI_REPATCH_PIC)
    // Polymorphic variants: add the case for 'structure' at 'currentIndex'
    // in an existing PolymorphicAccessStructureList.
    static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, cachedOffset);
    }
    static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, cachedOffset, callFrame);
    }
    static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, cachedOffset, callFrame);
    }
#endif

    static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, void* returnAddress)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, cachedOffset, returnAddress, callFrame);
    }

    static void compilePutByIdReplace(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, void* returnAddress)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompilePutByIdReplace(stubInfo, structure, cachedOffset, returnAddress);
    }

    static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, void* returnAddress)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress);
    }

    // Generate the machine trampolines shared by all code blocks; note no
    // CodeBlock is associated with this JIT instance.
    static void compileCTIMachineTrampolines(JSGlobalData* globalData)
    {
        JIT jit(globalData);
        jit.privateCompileCTIMachineTrampolines();
    }

    // Patch an already-generated access in place (no new code is emitted).
    static void patchGetByIdSelf(StructureStubInfo*, Structure*, size_t cachedOffset, void* returnAddress);
    static void patchPutByIdReplace(StructureStubInfo*, Structure*, size_t cachedOffset, void* returnAddress);

    static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, void* returnAddress)
    {
        JIT jit(globalData, codeBlock);
        return jit.privateCompilePatchGetArrayLength(returnAddress);
    }

    // Link / unlink a cached function call site (see StructureStubCompilationInfo).
    static void linkCall(JSFunction* callee, CodeBlock* calleeCodeBlock, void* ctiCode, CallLinkInfo* callLinkInfo, int callerArgCount);
    static void unlinkCall(CallLinkInfo*);

    // Enter JIT-generated 'code' through the trampoline and decode the
    // returned JSValue.  The six leading zeros on X86_64 fill the stacked
    // dummy arguments declared on ctiTrampoline.
    inline static JSValuePtr execute(void* code, RegisterFile* registerFile, CallFrame* callFrame, JSGlobalData* globalData, JSValuePtr* exception)
    {
        return JSValuePtr::decode(ctiTrampoline(
#if PLATFORM(X86_64)
            0, 0, 0, 0, 0, 0,
#endif
            code, registerFile, callFrame, exception, Profiler::enabledProfilerReference(), globalData));
    }

private:
    JIT(JSGlobalData*, CodeBlock* = 0);

    // Main compilation passes: emit fast paths, resolve intra-block links,
    // then emit the out-of-line slow cases; privateCompile drives them.
    void privateCompileMainPass();
    void privateCompileLinkPass();
    void privateCompileSlowCases();
    void privateCompile();
    void privateCompileGetByIdSelf(StructureStubInfo*, Structure*, size_t cachedOffset, void* returnAddress);
    void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, size_t cachedOffset, void* returnAddress, CallFrame* callFrame);
#if USE(CTI_REPATCH_PIC)
    void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, size_t cachedOffset);
    void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame);
    void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame);
#endif
    void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, size_t cachedOffset, void* returnAddress, CallFrame* callFrame);
    void privateCompilePutByIdReplace(StructureStubInfo*, Structure*, size_t cachedOffset, void* returnAddress);
    void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, void* returnAddress);

    void privateCompileCTIMachineTrampolines();
    void privateCompilePatchGetArrayLength(void* returnAddress);

    // Record jumps for later linking (slow cases, bytecode-target jumps).
    void addSlowCase(Jump);
    void addJump(Jump, int);
    void emitJumpSlowToHot(Jump, int);

    // Per-opcode code generators: hot (fast-path) and slow-case emitters
    // for property access, calls, strict equality, and arithmetic.
    void compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier* ident, unsigned propertyAccessInstructionIndex);
    void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, unsigned propertyAccessInstructionIndex);
    void compilePutByIdHotPath(int baseVReg, Identifier* ident, int valueVReg, unsigned propertyAccessInstructionIndex);
    void compilePutByIdSlowCase(int baseVReg, Identifier* ident, int valueVReg, Vector<SlowCaseEntry>::iterator& iter, unsigned propertyAccessInstructionIndex);
    void compileOpCall(OpcodeID, Instruction* instruction, unsigned callLinkInfoIndex);
    void compileOpCallInitializeCallFrame();
    void compileOpCallSetupArgs(Instruction*);
    void compileOpCallEvalSetupArgs(Instruction*);
    void compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID);
    void compileOpConstructSetupArgs(Instruction*);
    enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
    void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
    void putDoubleResultToJSNumberCellOrJSImmediate(X86Assembler::XMMRegisterID xmmSource, RegisterID jsNumberCell, unsigned dst, X86Assembler::JmpSrc* wroteJSNumberCell, X86Assembler::XMMRegisterID tempXmm, RegisterID tempReg1, RegisterID tempReg2);

    void compileFastArith_op_add(Instruction*);
    void compileFastArith_op_sub(Instruction*);
    void compileFastArith_op_mul(Instruction*);
    void compileFastArith_op_mod(unsigned result, unsigned op1, unsigned op2);
    void compileFastArith_op_bitand(unsigned result, unsigned op1, unsigned op2);
    void compileFastArith_op_lshift(unsigned result, unsigned op1, unsigned op2);
    void compileFastArith_op_rshift(unsigned result, unsigned op1, unsigned op2);
    void compileFastArith_op_pre_inc(unsigned srcDst);
    void compileFastArith_op_pre_dec(unsigned srcDst);
    void compileFastArith_op_post_inc(unsigned result, unsigned srcDst);
    void compileFastArith_op_post_dec(unsigned result, unsigned srcDst);
    void compileFastArithSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_mod(unsigned result, unsigned op1, unsigned op2, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_bitand(unsigned result, unsigned op1, unsigned op2, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_lshift(unsigned result, unsigned op1, unsigned op2, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_rshift(unsigned result, unsigned op1, unsigned op2, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_pre_inc(unsigned srcDst, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_pre_dec(unsigned srcDst, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_post_inc(unsigned result, unsigned srcDst, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_post_dec(unsigned result, unsigned srcDst, Vector<SlowCaseEntry>::iterator&);
#if ENABLE(JIT_OPTIMIZE_ARITHMETIC)
    void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
    void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
#endif

    // Move values between virtual registers (bytecode operands) and
    // machine registers.
    void emitGetVirtualRegister(int src, RegisterID dst);
    void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
    void emitPutVirtualRegister(unsigned dst, RegisterID from = X86::eax);

    // Marshal arguments into/out of the JIT stub argument area.
    void emitPutJITStubArg(RegisterID src, unsigned argumentNumber);
    void emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch);
    void emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber);
    void emitPutJITStubArgConstant(void* value, unsigned argumentNumber);
    void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);

    void emitInitRegister(unsigned dst);

    // Access the named CTI parameters of the current stub frame.
    void emitPutCTIParam(void* value, unsigned name);
    void emitPutCTIParam(RegisterID from, unsigned name);
    void emitGetCTIParam(unsigned name, RegisterID to);

    // Access fields of the call-frame header.
    void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
    void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
    void emitGetFromCallFrameHeader(RegisterFile::CallFrameHeaderEntry entry, RegisterID to);

    // Constant-operand helpers.
    JSValuePtr getConstantOperand(unsigned src);
    int32_t getConstantOperandImmediateInt(unsigned src);
    bool isOperandConstantImmediateInt(unsigned src);

    // Type-check branch emitters for JSCell vs. immediate values.
    Jump emitJumpIfJSCell(RegisterID);
    Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
    void emitJumpSlowCaseIfJSCell(RegisterID);
    Jump emitJumpIfNotJSCell(RegisterID);
    void emitJumpSlowCaseIfNotJSCell(RegisterID);
    void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
#if USE(ALTERNATE_JSIMMEDIATE)
    JIT::Jump emitJumpIfImmediateNumber(RegisterID);
    JIT::Jump emitJumpIfNotImmediateNumber(RegisterID);
#endif

    // Consume the next recorded slow case (without/with linking it here).
    Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
    {
        return iter++->from;
    }
    void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
    {
        iter->from.link(this);
        ++iter;
    }
    void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int vReg);

    JIT::Jump emitJumpIfImmediateInteger(RegisterID);
    JIT::Jump emitJumpIfNotImmediateInteger(RegisterID);
    JIT::Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
    void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
    void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);

    Jump checkStructure(RegisterID reg, Structure* structure);

    // Immediate-value tag manipulation helpers.
#if !USE(ALTERNATE_JSIMMEDIATE)
    void emitFastArithDeTagImmediate(RegisterID);
    Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
#endif
    void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
    void emitFastArithImmToInt(RegisterID);
    void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);

    void emitTagAsBoolImmediate(RegisterID reg);

    // Re-establish the stub argument pointer (normal vs. trampoline entry).
    void restoreArgumentReference();
    void restoreArgumentReferenceForTrampoline();

    // Emit calls; emitCTICall is overloaded per helper signature so callers
    // pass a typed helper and we funnel through one untyped implementation.
    Jump emitNakedCall(RegisterID);
    Jump emitNakedCall(void* function);
    Jump emitCTICall_internal(void*);
    Jump emitCTICall(CTIHelper_j helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
    Jump emitCTICall(CTIHelper_o helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
    Jump emitCTICall(CTIHelper_p helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
    Jump emitCTICall(CTIHelper_v helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
    Jump emitCTICall(CTIHelper_s helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
    Jump emitCTICall(CTIHelper_b helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
    Jump emitCTICall(CTIHelper_2 helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }

    void emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst);
    void emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index);

    void emitSlowScriptCheck();
#ifndef NDEBUG
    void printBytecodeOperandTypes(unsigned src1, unsigned src2);
#endif

    void killLastResultRegister();

    // Emit code that records the currently executing CodeBlock for the
    // sampling profiler (X86_64 needs a scratch register for the address).
#if ENABLE(CODEBLOCK_SAMPLING)
    void sampleCodeBlock(CodeBlock* codeBlock)
    {
#if PLATFORM(X86_64)
        move(ImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86::ecx);
        storePtr(ImmPtr(codeBlock), X86::ecx);
#else
        storePtr(ImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
#endif
    }
#else
    void sampleCodeBlock(CodeBlock*) {}
#endif

    // Emit code that records the currently executing instruction for the
    // opcode sampler.
#if ENABLE(OPCODE_SAMPLING)
    void sampleInstruction(Instruction* instruction, bool inHostFunction=false)
    {
#if PLATFORM(X86_64)
        move(ImmPtr(m_interpreter->sampler()->sampleSlot()), X86::ecx);
        storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86::ecx);
#else
        storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
#endif
    }
#else
    void sampleInstruction(Instruction*, bool) {}
#endif

    Interpreter* m_interpreter;
    JSGlobalData* m_globalData;
    CodeBlock* m_codeBlock;      // block being compiled; 0 for trampoline generation

    // Per-compilation bookkeeping, consumed by the link pass.
    Vector<CallRecord> m_calls;
    Vector<Label> m_labels;
    Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
    Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
    Vector<JumpTable> m_jmpTable;

    // A jump-to-subroutine site: the patchable pointer written at the call
    // site plus the label it must be bound to.
    struct JSRInfo {
        DataLabelPtr storeLocation;
        Label target;

        JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
            : storeLocation(storeLocation)
            , target(targetLocation)
        {
        }
    };

    unsigned m_bytecodeIndex;    // bytecode offset currently being compiled
    Vector<JSRInfo> m_jsrSites;
    Vector<SlowCaseEntry> m_slowCases;
    Vector<SwitchRecord> m_switches;

    int m_lastResultBytecodeRegister;
    unsigned m_jumpTargetsPosition;
};
573 }
574
575 #endif // ENABLE(JIT)
576
577 #endif // JIT_h