]> git.saurik.com Git - apple/javascriptcore.git/blob - jit/JIT.h
JavaScriptCore-1097.3.3.tar.gz
[apple/javascriptcore.git] / jit / JIT.h
1 /*
2 * Copyright (C) 2008 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #ifndef JIT_h
27 #define JIT_h
28
29 #if ENABLE(JIT)
30
31 // Verbose logging of code generation
32 #define ENABLE_JIT_VERBOSE 0
33 // Verbose logging for OSR-related code.
34 #define ENABLE_JIT_VERBOSE_OSR 0
35
36 // We've run into some problems where changing the size of the class JIT leads to
37 // performance fluctuations. Try forcing alignment in an attempt to stabilize this.
38 #if COMPILER(GCC)
39 #define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
40 #else
41 #define JIT_CLASS_ALIGNMENT
42 #endif
43
44 #define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
45
46 #include "CodeBlock.h"
47 #include "CompactJITCodeMap.h"
48 #include "Interpreter.h"
49 #include "JSInterfaceJIT.h"
50 #include "Opcode.h"
51 #include "Profiler.h"
52 #include <bytecode/SamplingTool.h>
53
54 namespace JSC {
55
56 class CodeBlock;
57 class FunctionExecutable;
58 class JIT;
59 class JSPropertyNameIterator;
60 class Interpreter;
61 class Register;
62 class RegisterFile;
63 class ScopeChainNode;
64 class StructureChain;
65
66 struct CallLinkInfo;
67 struct Instruction;
68 struct OperandTypes;
69 struct PolymorphicAccessStructureList;
70 struct SimpleJumpTable;
71 struct StringJumpTable;
72 struct StructureStubInfo;
73
74 struct CallRecord {
75 MacroAssembler::Call from;
76 unsigned bytecodeOffset;
77 void* to;
78
79 CallRecord()
80 {
81 }
82
83 CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
84 : from(from)
85 , bytecodeOffset(bytecodeOffset)
86 , to(to)
87 {
88 }
89 };
90
91 struct JumpTable {
92 MacroAssembler::Jump from;
93 unsigned toBytecodeOffset;
94
95 JumpTable(MacroAssembler::Jump f, unsigned t)
96 : from(f)
97 , toBytecodeOffset(t)
98 {
99 }
100 };
101
102 struct SlowCaseEntry {
103 MacroAssembler::Jump from;
104 unsigned to;
105 unsigned hint;
106
107 SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
108 : from(f)
109 , to(t)
110 , hint(h)
111 {
112 }
113 };
114
// Describes one switch site encountered during compilation, pairing its jump
// table with the bytecode offsets involved.
struct SwitchRecord {
    enum Type {
        Immediate,
        Character,
        String
    };

    // Discriminates which union member below is active: simpleJumpTable for
    // Immediate/Character switches, stringJumpTable for String switches.
    Type type;

    union {
        SimpleJumpTable* simpleJumpTable;
        StringJumpTable* stringJumpTable;
    } jumpTable;

    // Bytecode offset of the switch itself.
    unsigned bytecodeOffset;
    // Offset of the default target, taken when no case matches.
    unsigned defaultOffset;

    // For Immediate and Character switches; the caller supplies the concrete type.
    SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
        : type(type)
        , bytecodeOffset(bytecodeOffset)
        , defaultOffset(defaultOffset)
    {
        this->jumpTable.simpleJumpTable = jumpTable;
    }

    // For String switches.
    SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
        : type(String)
        , bytecodeOffset(bytecodeOffset)
        , defaultOffset(defaultOffset)
    {
        this->jumpTable.stringJumpTable = jumpTable;
    }
};
148
// Tag types used to select between the get-by-id and put-by-id overloads of
// PropertyStubCompilationInfo's constructors and slowCaseInfo().
enum PropertyStubGetById_T { PropertyStubGetById };
enum PropertyStubPutById_T { PropertyStubPutById };
151
// Gathers all assembler labels recorded while compiling one property-access
// stub (get_by_id, put_by_id, or method check) so they can later be copied
// into a StructureStubInfo via copyToStubInfo().
struct PropertyStubCompilationInfo {
    enum Type { GetById, PutById, MethodCheck } m_type;

    unsigned bytecodeIndex;
    MacroAssembler::Call callReturnLocation;
    MacroAssembler::Label hotPathBegin;

    // Labels recorded for the get_by_id fast path.
    MacroAssembler::DataLabelPtr getStructureToCompare;
    MacroAssembler::PatchableJump getStructureCheck;
#if USE(JSVALUE64)
    MacroAssembler::DataLabelCompact getDisplacementLabel;
#else
    // Two labels on 32-bit value representations (separate payload/tag loads
    // — TODO confirm against the JSVALUE32_64 code paths).
    MacroAssembler::DataLabelCompact getDisplacementLabel1;
    MacroAssembler::DataLabelCompact getDisplacementLabel2;
#endif
    MacroAssembler::Label getPutResult;
    MacroAssembler::Label getColdPathBegin;
    // Labels recorded for the put_by_id fast path.
    MacroAssembler::DataLabelPtr putStructureToCompare;
#if USE(JSVALUE64)
    MacroAssembler::DataLabel32 putDisplacementLabel;
#else
    MacroAssembler::DataLabel32 putDisplacementLabel1;
    MacroAssembler::DataLabel32 putDisplacementLabel2;
#endif
    // Labels recorded when a get_by_id is upgraded to a method check
    // (set via addMethodCheckInfo below).
    MacroAssembler::DataLabelPtr methodCheckStructureToCompare;
    MacroAssembler::DataLabelPtr methodCheckProtoObj;
    MacroAssembler::DataLabelPtr methodCheckProtoStructureToCompare;
    MacroAssembler::DataLabelPtr methodCheckPutFunction;

#if !ASSERT_DISABLED
    // Debug-only default constructor: marks the record with a sentinel
    // bytecode index so assertions can catch use before initialization.
    PropertyStubCompilationInfo()
        : bytecodeIndex(std::numeric_limits<unsigned>::max())
    {
    }
#endif


    // Records a get_by_id hot path; the displacement-label parameters differ
    // between the 64-bit and 32-bit value representations.
    PropertyStubCompilationInfo(PropertyStubGetById_T, unsigned bytecodeIndex, MacroAssembler::Label hotPathBegin,
#if USE(JSVALUE64)
        MacroAssembler::DataLabelPtr structureToCompare, MacroAssembler::PatchableJump structureCheck, MacroAssembler::DataLabelCompact displacementLabel, MacroAssembler::Label putResult)
#else
        MacroAssembler::DataLabelPtr structureToCompare, MacroAssembler::PatchableJump structureCheck, MacroAssembler::DataLabelCompact displacementLabel1, MacroAssembler::DataLabelCompact displacementLabel2, MacroAssembler::Label putResult)
#endif
        : m_type(GetById)
        , bytecodeIndex(bytecodeIndex)
        , hotPathBegin(hotPathBegin)
        , getStructureToCompare(structureToCompare)
        , getStructureCheck(structureCheck)
#if USE(JSVALUE64)
        , getDisplacementLabel(displacementLabel)
#else
        , getDisplacementLabel1(displacementLabel1)
        , getDisplacementLabel2(displacementLabel2)
#endif
        , getPutResult(putResult)
    {
    }

    // Records a put_by_id hot path.
    PropertyStubCompilationInfo(PropertyStubPutById_T, unsigned bytecodeIndex, MacroAssembler::Label hotPathBegin,
#if USE(JSVALUE64)
        MacroAssembler::DataLabelPtr structureToCompare, MacroAssembler::DataLabel32 displacementLabel)
#else
        MacroAssembler::DataLabelPtr structureToCompare, MacroAssembler::DataLabel32 displacementLabel1, MacroAssembler::DataLabel32 displacementLabel2)
#endif
        : m_type(PutById)
        , bytecodeIndex(bytecodeIndex)
        , hotPathBegin(hotPathBegin)
        , putStructureToCompare(structureToCompare)
#if USE(JSVALUE64)
        , putDisplacementLabel(displacementLabel)
#else
        , putDisplacementLabel1(displacementLabel1)
        , putDisplacementLabel2(displacementLabel2)
#endif
    {
    }

    // Records the slow-path labels for a get_by_id. Method checks share the
    // get_by_id slow path, hence the relaxed assertion.
    void slowCaseInfo(PropertyStubGetById_T, MacroAssembler::Label coldPathBegin, MacroAssembler::Call call)
    {
        ASSERT(m_type == GetById || m_type == MethodCheck);
        callReturnLocation = call;
        getColdPathBegin = coldPathBegin;
    }

    // Records the slow-path call for a put_by_id.
    void slowCaseInfo(PropertyStubPutById_T, MacroAssembler::Call call)
    {
        ASSERT(m_type == PutById);
        callReturnLocation = call;
    }

    // Upgrades this record from GetById to MethodCheck, attaching the labels
    // for the structure/prototype comparisons and the cached function pointer.
    void addMethodCheckInfo(MacroAssembler::DataLabelPtr structureToCompare, MacroAssembler::DataLabelPtr protoObj, MacroAssembler::DataLabelPtr protoStructureToCompare, MacroAssembler::DataLabelPtr putFunction)
    {
        m_type = MethodCheck;
        methodCheckStructureToCompare = structureToCompare;
        methodCheckProtoObj = protoObj;
        methodCheckProtoStructureToCompare = protoStructureToCompare;
        methodCheckPutFunction = putFunction;
    }

    // Defined out of line: copies the recorded labels into 'info', resolving
    // them to code locations through 'patchBuffer'.
    void copyToStubInfo(StructureStubInfo& info, LinkBuffer &patchBuffer);
};
252
// Labels and metadata recorded for a single call site during compilation
// (note the CallLinkInfo::CallType member — presumably consumed when call
// link info is populated at link time; confirm against JIT.cpp).
struct StructureStubCompilationInfo {
    MacroAssembler::DataLabelPtr hotPathBegin;
    MacroAssembler::Call hotPathOther;
    MacroAssembler::Call callReturnLocation;
    CallLinkInfo::CallType callType;
    unsigned bytecodeIndex;
};
260
261 struct MethodCallCompilationInfo {
262 MethodCallCompilationInfo(unsigned bytecodeIndex, unsigned propertyAccessIndex)
263 : bytecodeIndex(bytecodeIndex)
264 , propertyAccessIndex(propertyAccessIndex)
265 {
266 }
267
268 unsigned bytecodeIndex;
269 MacroAssembler::DataLabelPtr structureToCompare;
270 unsigned propertyAccessIndex;
271 };
272
// Near calls can only be patched to other JIT code, regular calls can be patched to JIT code or relinked to stub functions.
// All three identify the call site to repatch by the return address it produces;
// the two ctiPatchCallByReturnAddress overloads differ only in target type.
void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
277
278 class JIT : private JSInterfaceJIT {
279 friend class JITStubCall;
280 friend struct PropertyStubCompilationInfo;
281
282 using MacroAssembler::Jump;
283 using MacroAssembler::JumpList;
284 using MacroAssembler::Label;
285
286 static const int patchGetByIdDefaultStructure = -1;
287 static const int patchGetByIdDefaultOffset = 0;
288 // Magic number - the initial offset must not be representable as a signed 8-bit value,
289 // or the X86Assembler will compress the displacement and we may not be able to fit a patched offset.
290 static const int patchPutByIdDefaultOffset = 256;
291
292 public:
293 static JITCode compile(JSGlobalData* globalData, CodeBlock* codeBlock, JITCompilationEffort effort, CodePtr* functionEntryArityCheck = 0)
294 {
295 return JIT(globalData, codeBlock).privateCompile(functionEntryArityCheck, effort);
296 }
297
    // Compiles a get_by_id stub for a property found on a direct prototype.
    // Sets the JIT's bytecode offset from the stub info, then delegates to
    // privateCompileGetByIdProto for the given structure/prototype pair.
    static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
    {
        JIT jit(globalData, codeBlock);
        jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
        jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
    }
304
    // Compiles the 'currentIndex'-th entry of a polymorphic self-access
    // get_by_id list; delegates to privateCompileGetByIdSelfList.
    static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
    {
        JIT jit(globalData, codeBlock);
        jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
        jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
    }
    // Compiles the 'currentIndex'-th entry of a polymorphic prototype-access
    // get_by_id list; delegates to privateCompileGetByIdProtoList.
    static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
    {
        JIT jit(globalData, codeBlock);
        jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
        jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
    }
    // Compiles the 'currentIndex'-th entry of a polymorphic prototype-chain
    // get_by_id list ('count' hops along 'chain'); delegates to
    // privateCompileGetByIdChainList.
    static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
    {
        JIT jit(globalData, codeBlock);
        jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
        jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
    }
323
    // Compiles a monomorphic get_by_id stub for a property reached via a
    // prototype chain of 'count' hops; delegates to privateCompileGetByIdChain.
    static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
    {
        JIT jit(globalData, codeBlock);
        jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
        jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
    }
330
    // Compiles a put_by_id stub for a structure transition (oldStructure ->
    // newStructure); 'direct' distinguishes put_by_id_direct. Delegates to
    // privateCompilePutByIdTransition.
    static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
    {
        JIT jit(globalData, codeBlock);
        jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
        jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
    }
337
338 static PassRefPtr<ExecutableMemoryHandle> compileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure *trampolines)
339 {
340 if (!globalData->canUseJIT())
341 return 0;
342 JIT jit(globalData, 0);
343 return jit.privateCompileCTIMachineTrampolines(globalData, trampolines);
344 }
345
    // Produces a code reference for invoking the native function 'func'.
    // When the JIT is unavailable, falls back to the LLInt native-call
    // trampoline if LLInt is enabled, otherwise returns a null CodeRef.
    static CodeRef compileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
    {
        if (!globalData->canUseJIT()) {
#if ENABLE(LLINT)
            return CodeRef::createLLIntCodeRef(llint_native_call_trampoline);
#else
            return CodeRef();
#endif
        }
        JIT jit(globalData, 0);
        return jit.privateCompileCTINativeCall(globalData, func);
    }
358
359 static void resetPatchGetById(RepatchBuffer&, StructureStubInfo*);
360 static void resetPatchPutById(RepatchBuffer&, StructureStubInfo*);
361 static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
362 static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
363 static void patchMethodCallProto(JSGlobalData&, CodeBlock* codeblock, MethodCallLinkInfo&, StructureStubInfo&, JSObject*, Structure*, JSObject*, ReturnAddressPtr);
364
365 static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
366 {
367 JIT jit(globalData, codeBlock);
368 return jit.privateCompilePatchGetArrayLength(returnAddress);
369 }
370
371 static void linkFor(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, JSGlobalData*, CodeSpecializationKind);
372
373 private:
374 JIT(JSGlobalData*, CodeBlock* = 0);
375
376 void privateCompileMainPass();
377 void privateCompileLinkPass();
378 void privateCompileSlowCases();
379 JITCode privateCompile(CodePtr* functionEntryArityCheck, JITCompilationEffort);
380 void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
381 void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, size_t cachedOffset);
382 void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
383 void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
384 void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
385 void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress, bool direct);
386
387 PassRefPtr<ExecutableMemoryHandle> privateCompileCTIMachineTrampolines(JSGlobalData*, TrampolineStructure*);
388 Label privateCompileCTINativeCall(JSGlobalData*, bool isConstruct = false);
389 CodeRef privateCompileCTINativeCall(JSGlobalData*, NativeFunction);
390 void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
391
392 static bool isDirectPutById(StructureStubInfo*);
393
394 void addSlowCase(Jump);
395 void addSlowCase(JumpList);
396 void addSlowCase();
397 void addJump(Jump, int);
398 void emitJumpSlowToHot(Jump, int);
399
400 void compileOpCall(OpcodeID, Instruction*, unsigned callLinkInfoIndex);
401 void compileOpCallSlowCase(OpcodeID, Instruction*, Vector<SlowCaseEntry>::iterator&, unsigned callLinkInfoIndex);
402 void compileLoadVarargs(Instruction*);
403 void compileCallEval();
404 void compileCallEvalSlowCase(Vector<SlowCaseEntry>::iterator&);
405
406 enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
407 void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
408 bool isOperandConstantImmediateDouble(unsigned src);
409
410 void emitLoadDouble(int index, FPRegisterID value);
411 void emitLoadInt32ToDouble(int index, FPRegisterID value);
412 Jump emitJumpIfNotObject(RegisterID structureReg);
413 Jump emitJumpIfNotType(RegisterID baseReg, RegisterID scratchReg, JSType);
414
415 void testPrototype(JSValue, JumpList& failureCases);
416
417 enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterImmediates };
418 // value register in write barrier is used before any scratch registers
419 // so may safely be the same as either of the scratch registers.
420 void emitWriteBarrier(RegisterID owner, RegisterID valueTag, RegisterID scratch, RegisterID scratch2, WriteBarrierMode, WriteBarrierUseKind);
421 void emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch, WriteBarrierMode, WriteBarrierUseKind);
422
423 template<typename ClassType, bool destructor, typename StructureType> void emitAllocateBasicJSObject(StructureType, RegisterID result, RegisterID storagePtr);
424 void emitAllocateBasicStorage(size_t, RegisterID result, RegisterID storagePtr);
425 template<typename T> void emitAllocateJSFinalObject(T structure, RegisterID result, RegisterID storagePtr);
426 void emitAllocateJSFunction(FunctionExecutable*, RegisterID scopeChain, RegisterID result, RegisterID storagePtr);
427 void emitAllocateJSArray(unsigned valuesRegister, unsigned length, RegisterID cellResult, RegisterID storageResult, RegisterID storagePtr);
428
429 #if ENABLE(VALUE_PROFILER)
430 // This assumes that the value to profile is in regT0 and that regT3 is available for
431 // scratch.
432 void emitValueProfilingSite(ValueProfile*);
433 void emitValueProfilingSite(unsigned bytecodeOffset);
434 void emitValueProfilingSite();
435 #else
436 void emitValueProfilingSite(unsigned) { }
437 void emitValueProfilingSite() { }
438 #endif
439
440 #if USE(JSVALUE32_64)
441 bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);
442
443 void emitLoadTag(int index, RegisterID tag);
444 void emitLoadPayload(int index, RegisterID payload);
445
446 void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
447 void emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
448 void emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2);
449
450 void emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
451 void emitStore(int index, const JSValue constant, RegisterID base = callFrameRegister);
452 void emitStoreInt32(int index, RegisterID payload, bool indexIsInt32 = false);
453 void emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32 = false);
454 void emitStoreAndMapInt32(int index, RegisterID tag, RegisterID payload, bool indexIsInt32, size_t opcodeLength);
455 void emitStoreCell(int index, RegisterID payload, bool indexIsCell = false);
456 void emitStoreBool(int index, RegisterID payload, bool indexIsBool = false);
457 void emitStoreDouble(int index, FPRegisterID value);
458
459 bool isLabeled(unsigned bytecodeOffset);
460 void map(unsigned bytecodeOffset, int virtualRegisterIndex, RegisterID tag, RegisterID payload);
461 void unmap(RegisterID);
462 void unmap();
463 bool isMapped(int virtualRegisterIndex);
464 bool getMappedPayload(int virtualRegisterIndex, RegisterID& payload);
465 bool getMappedTag(int virtualRegisterIndex, RegisterID& tag);
466
467 void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex);
468 void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag);
469
470 void compileGetByIdHotPath();
471 void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
472 void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
473 void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
474 void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset);
475 void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, size_t cachedOffset);
476
477 // Arithmetic opcode helpers
478 void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
479 void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
480 void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
481
482 #if CPU(ARM_TRADITIONAL)
483 // sequenceOpCall
484 static const int sequenceOpCallInstructionSpace = 12;
485 static const int sequenceOpCallConstantSpace = 2;
486 // sequenceMethodCheck
487 static const int sequenceMethodCheckInstructionSpace = 40;
488 static const int sequenceMethodCheckConstantSpace = 6;
489 // sequenceGetByIdHotPath
490 static const int sequenceGetByIdHotPathInstructionSpace = 36;
491 static const int sequenceGetByIdHotPathConstantSpace = 4;
492 // sequenceGetByIdSlowCase
493 static const int sequenceGetByIdSlowCaseInstructionSpace = 64;
494 static const int sequenceGetByIdSlowCaseConstantSpace = 4;
495 // sequencePutById
496 static const int sequencePutByIdInstructionSpace = 36;
497 static const int sequencePutByIdConstantSpace = 4;
498 #elif CPU(SH4)
499 // sequenceOpCall
500 static const int sequenceOpCallInstructionSpace = 12;
501 static const int sequenceOpCallConstantSpace = 2;
502 // sequenceMethodCheck
503 static const int sequenceMethodCheckInstructionSpace = 40;
504 static const int sequenceMethodCheckConstantSpace = 6;
505 // sequenceGetByIdHotPath
506 static const int sequenceGetByIdHotPathInstructionSpace = 36;
507 static const int sequenceGetByIdHotPathConstantSpace = 5;
508 // sequenceGetByIdSlowCase
509 static const int sequenceGetByIdSlowCaseInstructionSpace = 38;
510 static const int sequenceGetByIdSlowCaseConstantSpace = 4;
511 // sequencePutById
512 static const int sequencePutByIdInstructionSpace = 36;
513 static const int sequencePutByIdConstantSpace = 5;
514 #endif
515
516 #else // USE(JSVALUE32_64)
517 void emitGetVirtualRegister(int src, RegisterID dst);
518 void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
519 void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);
    // JSVALUE64: storing a cell is just a plain virtual-register store.
    // The trailing bool exists only to keep the signature compatible with the
    // JSVALUE32_64 overload, where it indicates the index is known to be a cell.
    void emitStoreCell(unsigned dst, RegisterID payload, bool /* only used in JSValue32_64 */ = false)
    {
        emitPutVirtualRegister(dst, payload);
    }
524
525 int32_t getConstantOperandImmediateInt(unsigned src);
526
527 void killLastResultRegister();
528
529 Jump emitJumpIfJSCell(RegisterID);
530 Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
531 void emitJumpSlowCaseIfJSCell(RegisterID);
532 Jump emitJumpIfNotJSCell(RegisterID);
533 void emitJumpSlowCaseIfNotJSCell(RegisterID);
534 void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
535 Jump emitJumpIfImmediateInteger(RegisterID);
536 Jump emitJumpIfNotImmediateInteger(RegisterID);
537 Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
538 void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
539 void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
540 void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
541
542 void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
543 void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);
544
545 void emitTagAsBoolImmediate(RegisterID reg);
546 void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
547 void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
548
549 void compileGetByIdHotPath(int baseVReg, Identifier*);
550 void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
551 void compileGetDirectOffset(RegisterID base, RegisterID result, size_t cachedOffset);
552 void compileGetDirectOffset(JSObject* base, RegisterID result, size_t cachedOffset);
553 void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch);
554 void compilePutDirectOffset(RegisterID base, RegisterID value, size_t cachedOffset);
555
556 #endif // USE(JSVALUE32_64)
557
558 #if (defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL)
559 #define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace); } while (false)
560 #define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace, dst); } while (false)
561 #define END_UNINTERRUPTED_SEQUENCE(name) END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, 0)
562
563 void beginUninterruptedSequence(int, int);
564 void endUninterruptedSequence(int, int, int);
565
566 #else
567 #define BEGIN_UNINTERRUPTED_SEQUENCE(name)
568 #define END_UNINTERRUPTED_SEQUENCE(name)
569 #define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst)
570 #endif
571
572 void emit_compareAndJump(OpcodeID, unsigned op1, unsigned op2, unsigned target, RelationalCondition);
573 void emit_compareAndJumpSlow(unsigned op1, unsigned op2, unsigned target, DoubleCondition, int (JIT_STUB *stub)(STUB_ARGS_DECLARATION), bool invert, Vector<SlowCaseEntry>::iterator&);
574
575 void emit_op_add(Instruction*);
576 void emit_op_bitand(Instruction*);
577 void emit_op_bitor(Instruction*);
578 void emit_op_bitxor(Instruction*);
579 void emit_op_call(Instruction*);
580 void emit_op_call_eval(Instruction*);
581 void emit_op_call_varargs(Instruction*);
582 void emit_op_call_put_result(Instruction*);
583 void emit_op_catch(Instruction*);
584 void emit_op_construct(Instruction*);
585 void emit_op_get_callee(Instruction*);
586 void emit_op_create_this(Instruction*);
587 void emit_op_convert_this(Instruction*);
588 void emit_op_create_arguments(Instruction*);
589 void emit_op_debug(Instruction*);
590 void emit_op_del_by_id(Instruction*);
591 void emit_op_div(Instruction*);
592 void emit_op_end(Instruction*);
593 void emit_op_enter(Instruction*);
594 void emit_op_create_activation(Instruction*);
595 void emit_op_eq(Instruction*);
596 void emit_op_eq_null(Instruction*);
597 void emit_op_get_by_id(Instruction*);
598 void emit_op_get_arguments_length(Instruction*);
599 void emit_op_get_by_val(Instruction*);
600 void emit_op_get_argument_by_val(Instruction*);
601 void emit_op_get_by_pname(Instruction*);
602 void emit_op_get_global_var(Instruction*);
603 void emit_op_get_scoped_var(Instruction*);
604 void emit_op_init_lazy_reg(Instruction*);
605 void emit_op_check_has_instance(Instruction*);
606 void emit_op_instanceof(Instruction*);
607 void emit_op_is_undefined(Instruction*);
608 void emit_op_is_boolean(Instruction*);
609 void emit_op_is_number(Instruction*);
610 void emit_op_is_string(Instruction*);
611 void emit_op_jeq_null(Instruction*);
612 void emit_op_jfalse(Instruction*);
613 void emit_op_jmp(Instruction*);
614 void emit_op_jmp_scopes(Instruction*);
615 void emit_op_jneq_null(Instruction*);
616 void emit_op_jneq_ptr(Instruction*);
617 void emit_op_jless(Instruction*);
618 void emit_op_jlesseq(Instruction*);
619 void emit_op_jgreater(Instruction*);
620 void emit_op_jgreatereq(Instruction*);
621 void emit_op_jnless(Instruction*);
622 void emit_op_jnlesseq(Instruction*);
623 void emit_op_jngreater(Instruction*);
624 void emit_op_jngreatereq(Instruction*);
625 void emit_op_jtrue(Instruction*);
626 void emit_op_loop(Instruction*);
627 void emit_op_loop_hint(Instruction*);
628 void emit_op_loop_if_less(Instruction*);
629 void emit_op_loop_if_lesseq(Instruction*);
630 void emit_op_loop_if_greater(Instruction*);
631 void emit_op_loop_if_greatereq(Instruction*);
632 void emit_op_loop_if_true(Instruction*);
// Fast-path code generators: one emit_op_* per bytecode opcode. Each
// emits the inline (common-case) machine code for its instruction and
// records any uncommon-case branches as slow cases, which the matching
// emitSlow_op_* function (declared below) handles in a second pass.
void emit_op_loop_if_false(Instruction*);
void emit_op_lshift(Instruction*);
void emit_op_method_check(Instruction*);
void emit_op_mod(Instruction*);
void emit_op_mov(Instruction*);
void emit_op_mul(Instruction*);
void emit_op_negate(Instruction*);
void emit_op_neq(Instruction*);
void emit_op_neq_null(Instruction*);
void emit_op_new_array(Instruction*);
void emit_op_new_array_buffer(Instruction*);
void emit_op_new_func(Instruction*);
void emit_op_new_func_exp(Instruction*);
void emit_op_new_object(Instruction*);
void emit_op_new_regexp(Instruction*);
void emit_op_get_pnames(Instruction*);
void emit_op_next_pname(Instruction*);
void emit_op_not(Instruction*);
void emit_op_nstricteq(Instruction*);
void emit_op_pop_scope(Instruction*);
void emit_op_post_dec(Instruction*);
void emit_op_post_inc(Instruction*);
void emit_op_pre_dec(Instruction*);
void emit_op_pre_inc(Instruction*);
void emit_op_profile_did_call(Instruction*);
void emit_op_profile_will_call(Instruction*);
void emit_op_push_new_scope(Instruction*);
void emit_op_push_scope(Instruction*);
void emit_op_put_by_id(Instruction*);
void emit_op_put_by_index(Instruction*);
void emit_op_put_by_val(Instruction*);
void emit_op_put_getter_setter(Instruction*);
void emit_op_put_global_var(Instruction*);
void emit_op_put_scoped_var(Instruction*);
void emit_op_resolve(Instruction*);
void emit_op_resolve_base(Instruction*);
void emit_op_ensure_property_exists(Instruction*);
// NOTE(review): the 'dynamic' flag presumably lets the _dynamic
// variant below share this implementation — confirm at the definition.
void emit_op_resolve_global(Instruction*, bool dynamic = false);
void emit_op_resolve_global_dynamic(Instruction*);
void emit_op_resolve_skip(Instruction*);
void emit_op_resolve_with_base(Instruction*);
void emit_op_resolve_with_this(Instruction*);
void emit_op_ret(Instruction*);
void emit_op_ret_object_or_this(Instruction*);
void emit_op_rshift(Instruction*);
void emit_op_strcat(Instruction*);
void emit_op_stricteq(Instruction*);
void emit_op_sub(Instruction*);
void emit_op_switch_char(Instruction*);
void emit_op_switch_imm(Instruction*);
void emit_op_switch_string(Instruction*);
void emit_op_tear_off_activation(Instruction*);
void emit_op_tear_off_arguments(Instruction*);
void emit_op_throw(Instruction*);
void emit_op_throw_reference_error(Instruction*);
void emit_op_to_jsnumber(Instruction*);
void emit_op_to_primitive(Instruction*);
void emit_op_unexpected_load(Instruction*);
void emit_op_urshift(Instruction*);

// Slow-path code generators, one per opcode whose fast path can bail
// out. Each consumes its opcode's entries from the slow-case list
// through 'iter' (see getSlowCase()/linkSlowCase()), in the order the
// fast path recorded them.
void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_create_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_check_has_instance(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jgreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jgreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jngreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jngreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_loop_if_greater(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_loop_if_greatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_loop_if_false(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_new_object(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_resolve_global_dynamic(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_new_func(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_new_func_exp(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_new_array(Instruction*, Vector<SlowCaseEntry>::iterator&);

// Shared implementation for the signed and unsigned right-shift
// opcodes; 'isUnsigned' selects between them.
void emitRightShift(Instruction*, bool isUnsigned);
void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);

/* This function is deprecated. */
void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);

// Writes an initial value into virtual register 'dst' —
// NOTE(review): presumably undefined; confirm at the definition.
void emitInitRegister(unsigned dst);

// Accessors for slots of the call-frame header
// (see RegisterFile::CallFrameHeaderEntry for the slot set).
void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
void emitPutCellToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
void emitPutIntToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
void emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);

// Queries about an operand's constant-ness, keyed by virtual register
// index 'src'.
JSValue getConstantOperand(unsigned src);
bool isOperandConstantImmediateInt(unsigned src);
bool isOperandConstantImmediateChar(unsigned src);

// NOTE(review): name suggests "is the current bytecode offset a jump
// target?" — confirm at the definition before relying on it.
bool atJumpTarget();

776 Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
777 {
778 return iter++->from;
779 }
780 void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
781 {
782 iter->from.link(this);
783 ++iter;
784 }
// Consumes a slow-case entry that carries no jump (the fast path
// reserved a slot but never branched); asserts that invariant and
// simply advances the iterator.
void linkDummySlowCase(Vector<SlowCaseEntry>::iterator& iter)
{
    ASSERT(!iter->from.isSet());
    ++iter;
}
// NOTE(review): name suggests the pending slow case is linked only
// when the operand is not statically known to be a JSCell — confirm
// at the definition.
void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int virtualRegisterIndex);

// Emits a structure check on the cell in 'reg'; the returned Jump is
// the branch taken when the check does not pass.
Jump checkStructure(RegisterID reg, Structure* structure);

void restoreArgumentReference();
void restoreArgumentReferenceForTrampoline();
void updateTopCallFrame();

// Emits a bare (unlinked) call; the default CodePtr() leaves the
// target to be filled in at link time.
Call emitNakedCall(CodePtr function = CodePtr());

void preserveReturnAddressAfterCall(RegisterID);
void restoreReturnAddressBeforeReturn(RegisterID);
void restoreReturnAddressBeforeReturn(Address);

// Loads the character value of a single character string into dst.
void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);

// Counter checks that trigger tier-up; distinguished by whether they
// sit on a loop back-edge or at a return.
enum OptimizationCheckKind { LoopOptimizationCheck, RetOptimizationCheck };
#if ENABLE(DFG_JIT)
void emitOptimizationCheck(OptimizationCheckKind);
#else
// Without the DFG there is no higher tier to promote into, so the
// check compiles away to nothing.
void emitOptimizationCheck(OptimizationCheckKind) { }
#endif

void emitTimeoutCheck();
#ifndef NDEBUG
void printBytecodeOperandTypes(unsigned src1, unsigned src2);
#endif

// Optional sampling/profiling hooks; each group is compiled in only
// when its feature flag is enabled.
#if ENABLE(SAMPLING_FLAGS)
void setSamplingFlag(int32_t);
void clearSamplingFlag(int32_t);
#endif

#if ENABLE(SAMPLING_COUNTERS)
void emitCount(AbstractSamplingCounter&, int32_t = 1);
#endif

#if ENABLE(OPCODE_SAMPLING)
void sampleInstruction(Instruction*, bool = false);
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
void sampleCodeBlock(CodeBlock*);
#else
void sampleCodeBlock(CodeBlock*) {}
#endif

#if ENABLE(DFG_JIT)
bool canBeOptimized() { return m_canBeOptimized; }
bool shouldEmitProfiling() { return m_canBeOptimized; }
#else
bool canBeOptimized() { return false; }
// Enables use of value profiler with tiered compilation turned off,
// in which case all code gets profiled.
bool shouldEmitProfiling() { return true; }
#endif

// Compilation inputs.
Interpreter* m_interpreter;
JSGlobalData* m_globalData;
CodeBlock* m_codeBlock; // the bytecode block being compiled

// Records accumulated while emitting code, to be resolved at link time.
Vector<CallRecord> m_calls;
Vector<Label> m_labels;
Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
Vector<MethodCallCompilationInfo> m_methodCallCompilationInfo;
Vector<JumpTable> m_jmpTable;

unsigned m_bytecodeOffset; // offset of the instruction currently being compiled
Vector<SlowCaseEntry> m_slowCases;
Vector<SwitchRecord> m_switches;

// Cursors into the per-kind info vectors above.
unsigned m_propertyAccessInstructionIndex;
unsigned m_globalResolveInfoIndex;
unsigned m_callLinkInfoIndex;

#if USE(JSVALUE32_64)
unsigned m_jumpTargetIndex;
// NOTE(review): these appear to cache which virtual register is
// currently mapped into the (tag, payload) register pair — confirm
// against the map/unmap implementation.
unsigned m_mappedBytecodeOffset;
int m_mappedVirtualRegisterIndex;
RegisterID m_mappedTag;
RegisterID m_mappedPayload;
#else
int m_lastResultBytecodeRegister;
#endif
unsigned m_jumpTargetsPosition;

#ifndef NDEBUG
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
Label m_uninterruptedInstructionSequenceBegin;
int m_uninterruptedConstantSequenceBegin;
#endif
#endif
WeakRandom m_randomGenerator;
static CodeRef stringGetByValStubGenerator(JSGlobalData*);

#if ENABLE(VALUE_PROFILER)
bool m_canBeOptimized;
#endif
} JIT_CLASS_ALIGNMENT;
891
892 inline void JIT::emit_op_loop(Instruction* currentInstruction)
893 {
894 emitTimeoutCheck();
895 emit_op_jmp(currentInstruction);
896 }
897
898 inline void JIT::emit_op_loop_hint(Instruction*)
899 {
900 emitOptimizationCheck(LoopOptimizationCheck);
901 }
902
903 inline void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
904 {
905 emitTimeoutCheck();
906 emit_op_jtrue(currentInstruction);
907 }
908
909 inline void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
910 {
911 emitSlow_op_jtrue(currentInstruction, iter);
912 }
913
914 inline void JIT::emit_op_loop_if_false(Instruction* currentInstruction)
915 {
916 emitTimeoutCheck();
917 emit_op_jfalse(currentInstruction);
918 }
919
920 inline void JIT::emitSlow_op_loop_if_false(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
921 {
922 emitSlow_op_jfalse(currentInstruction, iter);
923 }
924
925 inline void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
926 {
927 emitTimeoutCheck();
928 emit_op_jless(currentInstruction);
929 }
930
931 inline void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
932 {
933 emitSlow_op_jless(currentInstruction, iter);
934 }
935
936 inline void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
937 {
938 emitTimeoutCheck();
939 emit_op_jlesseq(currentInstruction);
940 }
941
942 inline void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
943 {
944 emitSlow_op_jlesseq(currentInstruction, iter);
945 }
946
947 inline void JIT::emit_op_loop_if_greater(Instruction* currentInstruction)
948 {
949 emitTimeoutCheck();
950 emit_op_jgreater(currentInstruction);
951 }
952
953 inline void JIT::emitSlow_op_loop_if_greater(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
954 {
955 emitSlow_op_jgreater(currentInstruction, iter);
956 }
957
958 inline void JIT::emit_op_loop_if_greatereq(Instruction* currentInstruction)
959 {
960 emitTimeoutCheck();
961 emit_op_jgreatereq(currentInstruction);
962 }
963
964 inline void JIT::emitSlow_op_loop_if_greatereq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
965 {
966 emitSlow_op_jgreatereq(currentInstruction, iter);
967 }
968
969 } // namespace JSC
970
971 #endif // ENABLE(JIT)
972
973 #endif // JIT_h