1 /*
2 * Copyright (C) 2008 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #ifndef JIT_h
27 #define JIT_h
28
29 #if ENABLE(JIT)
30
31 // We've run into some problems where changing the size of the class JIT leads to
32 // performance fluctuations. Try forcing alignment in an attempt to stabilize this.
33 #if COMPILER(GCC)
34 #define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
35 #else
36 #define JIT_CLASS_ALIGNMENT
37 #endif
38
39 #define ASSERT_JIT_OFFSET_UNUSED(variable, actual, expected) ASSERT_WITH_MESSAGE_UNUSED(variable, actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
40 #define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
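// Illustrative use (assumed call site, mirroring the property-access patching code): check a
// hard-coded patch offset against the distance the assembler actually emitted, e.g.
//     ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetGetByIdStructure);
// so that a code-generation change which shifts the offset fails loudly in debug builds.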
41
42 #include "CodeBlock.h"
43 #include "Interpreter.h"
44 #include "JSInterfaceJIT.h"
45 #include "Opcode.h"
46 #include "Profiler.h"
47 #include <bytecode/SamplingTool.h>
48
49 namespace JSC {
50
51 class CodeBlock;
52 class JIT;
53 class JSPropertyNameIterator;
54 class Interpreter;
55 class Register;
56 class RegisterFile;
57 class ScopeChainNode;
58 class StructureChain;
59
60 struct CallLinkInfo;
61 struct Instruction;
62 struct OperandTypes;
63 struct PolymorphicAccessStructureList;
64 struct SimpleJumpTable;
65 struct StringJumpTable;
66 struct StructureStubInfo;
67
68 struct CallRecord {
69 MacroAssembler::Call from;
70 unsigned bytecodeOffset;
71 void* to;
72
73 CallRecord()
74 {
75 }
76
77 CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
78 : from(from)
79 , bytecodeOffset(bytecodeOffset)
80 , to(to)
81 {
82 }
83 };
84
85 struct JumpTable {
86 MacroAssembler::Jump from;
87 unsigned toBytecodeOffset;
88
89 JumpTable(MacroAssembler::Jump f, unsigned t)
90 : from(f)
91 , toBytecodeOffset(t)
92 {
93 }
94 };
95
96 struct SlowCaseEntry {
97 MacroAssembler::Jump from;
98 unsigned to;
99 unsigned hint;
100
101 SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
102 : from(f)
103 , to(t)
104 , hint(h)
105 {
106 }
107 };
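// Each fast path records its bail-out jumps with addSlowCase(); privateCompileSlowCases()
// later walks m_slowCases in order, and the matching emitSlow_op_* handler links each
// recorded jump via linkSlowCase(iter) before emitting its slow-path call.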
108
109 struct SwitchRecord {
110 enum Type {
111 Immediate,
112 Character,
113 String
114 };
115
116 Type type;
117
118 union {
119 SimpleJumpTable* simpleJumpTable;
120 StringJumpTable* stringJumpTable;
121 } jumpTable;
122
123 unsigned bytecodeOffset;
124 unsigned defaultOffset;
125
126 SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
127 : type(type)
128 , bytecodeOffset(bytecodeOffset)
129 , defaultOffset(defaultOffset)
130 {
131 this->jumpTable.simpleJumpTable = jumpTable;
132 }
133
134 SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
135 : type(String)
136 , bytecodeOffset(bytecodeOffset)
137 , defaultOffset(defaultOffset)
138 {
139 this->jumpTable.stringJumpTable = jumpTable;
140 }
141 };
142
143 struct PropertyStubCompilationInfo {
144 MacroAssembler::Call callReturnLocation;
145 MacroAssembler::Label hotPathBegin;
146 };
147
148 struct StructureStubCompilationInfo {
149 MacroAssembler::DataLabelPtr hotPathBegin;
150 MacroAssembler::Call hotPathOther;
151 MacroAssembler::Call callReturnLocation;
152 bool isCall;
153 };
154
155 struct MethodCallCompilationInfo {
156 MethodCallCompilationInfo(unsigned propertyAccessIndex)
157 : propertyAccessIndex(propertyAccessIndex)
158 {
159 }
160
161 MacroAssembler::DataLabelPtr structureToCompare;
162 unsigned propertyAccessIndex;
163 };
164
165 // Near calls can only be patched to other JIT code; regular calls can be patched to JIT code or relinked to stub functions.
166 void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
167 void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
168 void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
169
170 class JIT : private JSInterfaceJIT {
171 friend class JITStubCall;
172
173 using MacroAssembler::Jump;
174 using MacroAssembler::JumpList;
175 using MacroAssembler::Label;
176
177 static const int patchGetByIdDefaultStructure = -1;
178 static const int patchGetByIdDefaultOffset = 0;
179 // Magic number - the initial offset must not be representable as a signed 8-bit value, or the X86Assembler
180 // will compress the displacement and we may not be able to fit a patched offset.
181 static const int patchPutByIdDefaultOffset = 256;
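// (256 lies outside the signed 8-bit range -128..127, so x86 emits a full 32-bit displacement
// that repatching can later overwrite with any real property offset.)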
182
183 public:
184 static JITCode compile(JSGlobalData* globalData, CodeBlock* codeBlock, CodePtr* functionEntryArityCheck = 0)
185 {
186 return JIT(globalData, codeBlock).privateCompile(functionEntryArityCheck);
187 }
188
189 static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
190 {
191 JIT jit(globalData, codeBlock);
192 jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
193 }
194
195 static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
196 {
197 JIT jit(globalData, codeBlock);
198 jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
199 }
200 static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
201 {
202 JIT jit(globalData, codeBlock);
203 jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
204 }
205 static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
206 {
207 JIT jit(globalData, codeBlock);
208 jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
209 }
210
211 static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
212 {
213 JIT jit(globalData, codeBlock);
214 jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
215 }
216
217 static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
218 {
219 JIT jit(globalData, codeBlock);
220 jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
221 }
222
223 static void compileCTIMachineTrampolines(JSGlobalData* globalData, RefPtr<ExecutablePool>* executablePool, TrampolineStructure* trampolines)
224 {
225 if (!globalData->canUseJIT())
226 return;
227 JIT jit(globalData, 0);
228 jit.privateCompileCTIMachineTrampolines(executablePool, globalData, trampolines);
229 }
230
231 static CodePtr compileCTINativeCall(JSGlobalData* globalData, PassRefPtr<ExecutablePool> executablePool, NativeFunction func)
232 {
233 if (!globalData->canUseJIT())
234 return CodePtr();
235 JIT jit(globalData, 0);
236 return jit.privateCompileCTINativeCall(executablePool, globalData, func);
237 }
238
239 static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
240 static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
241 static void patchMethodCallProto(JSGlobalData&, CodeBlock* codeblock, MethodCallLinkInfo&, JSObjectWithGlobalObject*, Structure*, JSObject*, ReturnAddressPtr);
242
243 static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
244 {
245 JIT jit(globalData, codeBlock);
246 return jit.privateCompilePatchGetArrayLength(returnAddress);
247 }
248
249 static void linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, int callerArgCount, JSGlobalData*);
250 static void linkConstruct(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, int callerArgCount, JSGlobalData*);
251
252 private:
253 struct JSRInfo {
254 DataLabelPtr storeLocation;
255 Label target;
256
257 JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
258 : storeLocation(storeLocation)
259 , target(targetLocation)
260 {
261 }
262 };
263
264 JIT(JSGlobalData*, CodeBlock* = 0);
265
266 void privateCompileMainPass();
267 void privateCompileLinkPass();
268 void privateCompileSlowCases();
269 JITCode privateCompile(CodePtr* functionEntryArityCheck);
270 void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
271 void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, size_t cachedOffset);
272 void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
273 void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
274 void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
275 void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress, bool direct);
276
277 void privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* data, TrampolineStructure* trampolines);
278 Label privateCompileCTINativeCall(JSGlobalData*, bool isConstruct = false);
279 CodePtr privateCompileCTINativeCall(PassRefPtr<ExecutablePool> executablePool, JSGlobalData* data, NativeFunction func);
280 void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
281
282 void addSlowCase(Jump);
283 void addSlowCase(JumpList);
284 void addJump(Jump, int);
285 void emitJumpSlowToHot(Jump, int);
286
287 void compileOpCall(OpcodeID, Instruction* instruction, unsigned callLinkInfoIndex);
288 void compileOpCallVarargs(Instruction* instruction);
289 void compileOpCallInitializeCallFrame();
290 void compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID);
291 void compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter);
292
293 enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
294 void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
295 bool isOperandConstantImmediateDouble(unsigned src);
296
297 void emitLoadDouble(unsigned index, FPRegisterID value);
298 void emitLoadInt32ToDouble(unsigned index, FPRegisterID value);
299
300 void testPrototype(JSValue, JumpList& failureCases);
301
302 #if USE(JSVALUE32_64)
303 bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);
304
305 void emitLoadTag(unsigned index, RegisterID tag);
306 void emitLoadPayload(unsigned index, RegisterID payload);
307
308 void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
309 void emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
310 void emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2);
311
312 void emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
313 void emitStore(unsigned index, const JSValue constant, RegisterID base = callFrameRegister);
314 void emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32 = false);
315 void emitStoreInt32(unsigned index, TrustedImm32 payload, bool indexIsInt32 = false);
316 void emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell = false);
317 void emitStoreBool(unsigned index, RegisterID payload, bool indexIsBool = false);
318 void emitStoreDouble(unsigned index, FPRegisterID value);
319
320 bool isLabeled(unsigned bytecodeOffset);
321 void map(unsigned bytecodeOffset, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload);
322 void unmap(RegisterID);
323 void unmap();
324 bool isMapped(unsigned virtualRegisterIndex);
325 bool getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload);
326 bool getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag);
327
328 void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex);
329 void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag);
330 void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, unsigned virtualRegisterIndex);
331
332 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
333 void compileGetByIdHotPath();
334 void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
335 #endif
336 void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, Structure* structure, size_t cachedOffset);
337 void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
338 void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset);
339 void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, Structure* structure, size_t cachedOffset);
340
341 // Arithmetic opcode helpers
342 void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
343 void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
344 void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
345
346 #if CPU(X86)
347 // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
348 static const int patchOffsetPutByIdStructure = 7;
349 static const int patchOffsetPutByIdPropertyMapOffset1 = 22;
350 static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
351 // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
352 static const int patchOffsetGetByIdStructure = 7;
353 static const int patchOffsetGetByIdBranchToSlowCase = 13;
354 static const int patchOffsetGetByIdPropertyMapOffset1 = 19;
355 static const int patchOffsetGetByIdPropertyMapOffset2 = 22;
356 static const int patchOffsetGetByIdPutResult = 22;
357 #if ENABLE(OPCODE_SAMPLING)
358 static const int patchOffsetGetByIdSlowCaseCall = 37;
359 #else
360 static const int patchOffsetGetByIdSlowCaseCall = 27;
361 #endif
362 static const int patchOffsetOpCallCompareToJump = 6;
363
364 static const int patchOffsetMethodCheckProtoObj = 11;
365 static const int patchOffsetMethodCheckProtoStruct = 18;
366 static const int patchOffsetMethodCheckPutFunction = 29;
367 #elif CPU(ARM_TRADITIONAL)
368 // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
369 static const int patchOffsetPutByIdStructure = 4;
370 static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
371 static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
372 // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
373 static const int patchOffsetGetByIdStructure = 4;
374 static const int patchOffsetGetByIdBranchToSlowCase = 16;
375 static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
376 static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
377 static const int patchOffsetGetByIdPutResult = 36;
378 #if ENABLE(OPCODE_SAMPLING)
379 #error "OPCODE_SAMPLING is not yet supported"
380 #else
381 static const int patchOffsetGetByIdSlowCaseCall = 32;
382 #endif
383 static const int patchOffsetOpCallCompareToJump = 12;
384
385 static const int patchOffsetMethodCheckProtoObj = 12;
386 static const int patchOffsetMethodCheckProtoStruct = 20;
387 static const int patchOffsetMethodCheckPutFunction = 32;
388
389 // sequenceOpCall
390 static const int sequenceOpCallInstructionSpace = 12;
391 static const int sequenceOpCallConstantSpace = 2;
392 // sequenceMethodCheck
393 static const int sequenceMethodCheckInstructionSpace = 40;
394 static const int sequenceMethodCheckConstantSpace = 6;
395 // sequenceGetByIdHotPath
396 static const int sequenceGetByIdHotPathInstructionSpace = 36;
397 static const int sequenceGetByIdHotPathConstantSpace = 4;
398 // sequenceGetByIdSlowCase
399 static const int sequenceGetByIdSlowCaseInstructionSpace = 56;
400 static const int sequenceGetByIdSlowCaseConstantSpace = 2;
401 // sequencePutById
402 static const int sequencePutByIdInstructionSpace = 36;
403 static const int sequencePutByIdConstantSpace = 4;
404 #elif CPU(ARM_THUMB2)
405 // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
406 static const int patchOffsetPutByIdStructure = 10;
407 static const int patchOffsetPutByIdPropertyMapOffset1 = 36;
408 static const int patchOffsetPutByIdPropertyMapOffset2 = 48;
409 // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
410 static const int patchOffsetGetByIdStructure = 10;
411 static const int patchOffsetGetByIdBranchToSlowCase = 26;
412 static const int patchOffsetGetByIdPropertyMapOffset1 = 28;
413 static const int patchOffsetGetByIdPropertyMapOffset2 = 30;
414 static const int patchOffsetGetByIdPutResult = 32;
415 #if ENABLE(OPCODE_SAMPLING)
416 #error "OPCODE_SAMPLING is not yet supported"
417 #else
418 static const int patchOffsetGetByIdSlowCaseCall = 30;
419 #endif
420 static const int patchOffsetOpCallCompareToJump = 16;
421
422 static const int patchOffsetMethodCheckProtoObj = 24;
423 static const int patchOffsetMethodCheckProtoStruct = 34;
424 static const int patchOffsetMethodCheckPutFunction = 58;
425
426 // sequenceOpCall
427 static const int sequenceOpCallInstructionSpace = 12;
428 static const int sequenceOpCallConstantSpace = 2;
429 // sequenceMethodCheck
430 static const int sequenceMethodCheckInstructionSpace = 40;
431 static const int sequenceMethodCheckConstantSpace = 6;
432 // sequenceGetByIdHotPath
433 static const int sequenceGetByIdHotPathInstructionSpace = 36;
434 static const int sequenceGetByIdHotPathConstantSpace = 4;
435 // sequenceGetByIdSlowCase
436 static const int sequenceGetByIdSlowCaseInstructionSpace = 40;
437 static const int sequenceGetByIdSlowCaseConstantSpace = 2;
438 // sequencePutById
439 static const int sequencePutByIdInstructionSpace = 36;
440 static const int sequencePutByIdConstantSpace = 4;
441 #elif CPU(MIPS)
442 #if WTF_MIPS_ISA(1)
443 static const int patchOffsetPutByIdStructure = 16;
444 static const int patchOffsetPutByIdPropertyMapOffset1 = 56;
445 static const int patchOffsetPutByIdPropertyMapOffset2 = 72;
446 static const int patchOffsetGetByIdStructure = 16;
447 static const int patchOffsetGetByIdBranchToSlowCase = 48;
448 static const int patchOffsetGetByIdPropertyMapOffset1 = 56;
449 static const int patchOffsetGetByIdPropertyMapOffset2 = 76;
450 static const int patchOffsetGetByIdPutResult = 96;
451 #if ENABLE(OPCODE_SAMPLING)
452 #error "OPCODE_SAMPLING is not yet supported"
453 #else
454 static const int patchOffsetGetByIdSlowCaseCall = 44;
455 #endif
456 static const int patchOffsetOpCallCompareToJump = 32;
457 static const int patchOffsetMethodCheckProtoObj = 32;
458 static const int patchOffsetMethodCheckProtoStruct = 56;
459 static const int patchOffsetMethodCheckPutFunction = 88;
460 #else // WTF_MIPS_ISA(1)
461 static const int patchOffsetPutByIdStructure = 12;
462 static const int patchOffsetPutByIdPropertyMapOffset1 = 48;
463 static const int patchOffsetPutByIdPropertyMapOffset2 = 64;
464 static const int patchOffsetGetByIdStructure = 12;
465 static const int patchOffsetGetByIdBranchToSlowCase = 44;
466 static const int patchOffsetGetByIdPropertyMapOffset1 = 48;
467 static const int patchOffsetGetByIdPropertyMapOffset2 = 64;
468 static const int patchOffsetGetByIdPutResult = 80;
469 #if ENABLE(OPCODE_SAMPLING)
470 #error "OPCODE_SAMPLING is not yet supported"
471 #else
472 static const int patchOffsetGetByIdSlowCaseCall = 44;
473 #endif
474 static const int patchOffsetOpCallCompareToJump = 32;
475 static const int patchOffsetMethodCheckProtoObj = 32;
476 static const int patchOffsetMethodCheckProtoStruct = 52;
477 static const int patchOffsetMethodCheckPutFunction = 84;
478 #endif
479 #elif CPU(SH4)
480 // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
481 static const int patchOffsetGetByIdStructure = 6;
482 static const int patchOffsetPutByIdPropertyMapOffset = 24;
483 static const int patchOffsetPutByIdStructure = 6;
484 // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
485 static const int patchOffsetGetByIdBranchToSlowCase = 10;
486 static const int patchOffsetGetByIdPropertyMapOffset = 24;
487 static const int patchOffsetGetByIdPutResult = 32;
488
489 // sequenceOpCall
490 static const int sequenceOpCallInstructionSpace = 12;
491 static const int sequenceOpCallConstantSpace = 2;
492 // sequenceMethodCheck
493 static const int sequenceMethodCheckInstructionSpace = 40;
494 static const int sequenceMethodCheckConstantSpace = 6;
495 // sequenceGetByIdHotPath
496 static const int sequenceGetByIdHotPathInstructionSpace = 36;
497 static const int sequenceGetByIdHotPathConstantSpace = 5;
498 // sequenceGetByIdSlowCase
499 static const int sequenceGetByIdSlowCaseInstructionSpace = 26;
500 static const int sequenceGetByIdSlowCaseConstantSpace = 2;
501 // sequencePutById
502 static const int sequencePutByIdInstructionSpace = 36;
503 static const int sequencePutByIdConstantSpace = 5;
504
505 static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
506 static const int patchOffsetGetByIdPropertyMapOffset2 = 26;
507
508 static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
509 static const int patchOffsetPutByIdPropertyMapOffset2 = 26;
510
511 #if ENABLE(OPCODE_SAMPLING)
512 static const int patchOffsetGetByIdSlowCaseCall = 0; // FIXME
513 #else
514 static const int patchOffsetGetByIdSlowCaseCall = 22;
515 #endif
516 static const int patchOffsetOpCallCompareToJump = 4;
517
518 static const int patchOffsetMethodCheckProtoObj = 12;
519 static const int patchOffsetMethodCheckProtoStruct = 20;
520 static const int patchOffsetMethodCheckPutFunction = 32;
521 #else
522 #error "JSVALUE32_64 not supported on this platform."
523 #endif
524
525 #else // USE(JSVALUE32_64)
526 void emitGetVirtualRegister(int src, RegisterID dst);
527 void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
528 void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);
529
530 int32_t getConstantOperandImmediateInt(unsigned src);
531
532 void killLastResultRegister();
533
534 Jump emitJumpIfJSCell(RegisterID);
535 Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
536 void emitJumpSlowCaseIfJSCell(RegisterID);
537 Jump emitJumpIfNotJSCell(RegisterID);
538 void emitJumpSlowCaseIfNotJSCell(RegisterID);
539 void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
540 #if USE(JSVALUE32_64)
541 JIT::Jump emitJumpIfImmediateNumber(RegisterID reg)
542 {
543 return emitJumpIfImmediateInteger(reg);
544 }
545
546 JIT::Jump emitJumpIfNotImmediateNumber(RegisterID reg)
547 {
548 return emitJumpIfNotImmediateInteger(reg);
549 }
550 #endif
551 JIT::Jump emitJumpIfImmediateInteger(RegisterID);
552 JIT::Jump emitJumpIfNotImmediateInteger(RegisterID);
553 JIT::Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
554 void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
555 void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
556 void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
557
558 #if USE(JSVALUE32_64)
559 void emitFastArithDeTagImmediate(RegisterID);
560 Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
561 #endif
562 void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
563 void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);
564
565 void emitTagAsBoolImmediate(RegisterID reg);
566 void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
567 #if USE(JSVALUE64)
568 void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
569 #else
570 void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes);
571 #endif
572
573 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
574 void compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier* ident, unsigned propertyAccessInstructionIndex);
575 void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
576 #endif
577 void compileGetDirectOffset(RegisterID base, RegisterID result, Structure* structure, size_t cachedOffset);
578 void compileGetDirectOffset(JSObject* base, RegisterID result, size_t cachedOffset);
579 void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch);
580 void compilePutDirectOffset(RegisterID base, RegisterID value, Structure* structure, size_t cachedOffset);
581
582 #if CPU(X86_64)
583 // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
584 static const int patchOffsetPutByIdStructure = 10;
585 static const int patchOffsetPutByIdPropertyMapOffset = 31;
586 // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
587 static const int patchOffsetGetByIdStructure = 10;
588 static const int patchOffsetGetByIdBranchToSlowCase = 20;
589 static const int patchOffsetGetByIdPropertyMapOffset = 28;
590 static const int patchOffsetGetByIdPutResult = 28;
591 #if ENABLE(OPCODE_SAMPLING)
592 static const int patchOffsetGetByIdSlowCaseCall = 64;
593 #else
594 static const int patchOffsetGetByIdSlowCaseCall = 41;
595 #endif
596 static const int patchOffsetOpCallCompareToJump = 9;
597
598 static const int patchOffsetMethodCheckProtoObj = 20;
599 static const int patchOffsetMethodCheckProtoStruct = 30;
600 static const int patchOffsetMethodCheckPutFunction = 50;
601 #elif CPU(X86)
602 // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
603 static const int patchOffsetPutByIdStructure = 7;
604 static const int patchOffsetPutByIdPropertyMapOffset = 22;
605 // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
606 static const int patchOffsetGetByIdStructure = 7;
607 static const int patchOffsetGetByIdBranchToSlowCase = 13;
608 static const int patchOffsetGetByIdPropertyMapOffset = 22;
609 static const int patchOffsetGetByIdPutResult = 22;
610 #if ENABLE(OPCODE_SAMPLING)
611 static const int patchOffsetGetByIdSlowCaseCall = 33;
612 #else
613 static const int patchOffsetGetByIdSlowCaseCall = 23;
614 #endif
615 static const int patchOffsetOpCallCompareToJump = 6;
616
617 static const int patchOffsetMethodCheckProtoObj = 11;
618 static const int patchOffsetMethodCheckProtoStruct = 18;
619 static const int patchOffsetMethodCheckPutFunction = 29;
620 #elif CPU(ARM_THUMB2)
621 // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
622 static const int patchOffsetPutByIdStructure = 10;
623 static const int patchOffsetPutByIdPropertyMapOffset = 46;
624 // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
625 static const int patchOffsetGetByIdStructure = 10;
626 static const int patchOffsetGetByIdBranchToSlowCase = 26;
627 static const int patchOffsetGetByIdPropertyMapOffset = 46;
628 static const int patchOffsetGetByIdPutResult = 50;
629 #if ENABLE(OPCODE_SAMPLING)
630 static const int patchOffsetGetByIdSlowCaseCall = 0; // FIXME
631 #else
632 static const int patchOffsetGetByIdSlowCaseCall = 28;
633 #endif
634 static const int patchOffsetOpCallCompareToJump = 16;
635
636 static const int patchOffsetMethodCheckProtoObj = 24;
637 static const int patchOffsetMethodCheckProtoStruct = 34;
638 static const int patchOffsetMethodCheckPutFunction = 58;
639 #elif CPU(ARM_TRADITIONAL)
640 // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
641 static const int patchOffsetPutByIdStructure = 4;
642 static const int patchOffsetPutByIdPropertyMapOffset = 20;
643 // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
644 static const int patchOffsetGetByIdStructure = 4;
645 static const int patchOffsetGetByIdBranchToSlowCase = 16;
646 static const int patchOffsetGetByIdPropertyMapOffset = 20;
647 static const int patchOffsetGetByIdPutResult = 28;
648 #if ENABLE(OPCODE_SAMPLING)
649 #error "OPCODE_SAMPLING is not yet supported"
650 #else
651 static const int patchOffsetGetByIdSlowCaseCall = 28;
652 #endif
653 static const int patchOffsetOpCallCompareToJump = 12;
654
655 static const int patchOffsetMethodCheckProtoObj = 12;
656 static const int patchOffsetMethodCheckProtoStruct = 20;
657 static const int patchOffsetMethodCheckPutFunction = 32;
658
659 // sequenceOpCall
660 static const int sequenceOpCallInstructionSpace = 12;
661 static const int sequenceOpCallConstantSpace = 2;
662 // sequenceMethodCheck
663 static const int sequenceMethodCheckInstructionSpace = 40;
664 static const int sequenceMethodCheckConstantSpace = 6;
665 // sequenceGetByIdHotPath
666 static const int sequenceGetByIdHotPathInstructionSpace = 28;
667 static const int sequenceGetByIdHotPathConstantSpace = 3;
668 // sequenceGetByIdSlowCase
669 static const int sequenceGetByIdSlowCaseInstructionSpace = 32;
670 static const int sequenceGetByIdSlowCaseConstantSpace = 2;
671 // sequencePutById
672 static const int sequencePutByIdInstructionSpace = 28;
673 static const int sequencePutByIdConstantSpace = 3;
674 #elif CPU(MIPS)
675 #if WTF_MIPS_ISA(1)
676 static const int patchOffsetPutByIdStructure = 16;
677 static const int patchOffsetPutByIdPropertyMapOffset = 68;
678 static const int patchOffsetGetByIdStructure = 16;
679 static const int patchOffsetGetByIdBranchToSlowCase = 48;
680 static const int patchOffsetGetByIdPropertyMapOffset = 68;
681 static const int patchOffsetGetByIdPutResult = 88;
682 #if ENABLE(OPCODE_SAMPLING)
683 #error "OPCODE_SAMPLING is not yet supported"
684 #else
685 static const int patchOffsetGetByIdSlowCaseCall = 40;
686 #endif
687 static const int patchOffsetOpCallCompareToJump = 32;
688 static const int patchOffsetMethodCheckProtoObj = 32;
689 static const int patchOffsetMethodCheckProtoStruct = 56;
690 static const int patchOffsetMethodCheckPutFunction = 88;
691 #else // WTF_MIPS_ISA(1)
692 static const int patchOffsetPutByIdStructure = 12;
693 static const int patchOffsetPutByIdPropertyMapOffset = 60;
694 static const int patchOffsetGetByIdStructure = 12;
695 static const int patchOffsetGetByIdBranchToSlowCase = 44;
696 static const int patchOffsetGetByIdPropertyMapOffset = 60;
697 static const int patchOffsetGetByIdPutResult = 76;
698 #if ENABLE(OPCODE_SAMPLING)
699 #error "OPCODE_SAMPLING is not yet supported"
700 #else
701 static const int patchOffsetGetByIdSlowCaseCall = 40;
702 #endif
703 static const int patchOffsetOpCallCompareToJump = 32;
704 static const int patchOffsetMethodCheckProtoObj = 32;
705 static const int patchOffsetMethodCheckProtoStruct = 52;
706 static const int patchOffsetMethodCheckPutFunction = 84;
707 #endif
708 #endif
709 #endif // USE(JSVALUE32_64)
710
711 #if (defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL)
712 #define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace); } while (false)
713 #define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace, dst); } while (false)
714 #define END_UNINTERRUPTED_SEQUENCE(name) END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, 0)
715
716 void beginUninterruptedSequence(int, int);
717 void endUninterruptedSequence(int, int, int);
718
719 #else
720 #define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(); } while (false)
721 #define END_UNINTERRUPTED_SEQUENCE(name) do { endUninterruptedSequence(); } while (false)
722 #define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(); } while (false)
723 #endif
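// Illustrative usage (assumed, based on the sequence* constants above): emitters bracket each
// patchable code sequence so that, on targets with a constant pool, the reserved instruction
// and constant space prevents a pool flush from being emitted mid-sequence, e.g.
//     BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
//     ... emit the get_by_id hot path ...
//     END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);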
724
725 void emit_op_add(Instruction*);
726 void emit_op_bitand(Instruction*);
727 void emit_op_bitnot(Instruction*);
728 void emit_op_bitor(Instruction*);
729 void emit_op_bitxor(Instruction*);
730 void emit_op_call(Instruction*);
731 void emit_op_call_eval(Instruction*);
732 void emit_op_call_varargs(Instruction*);
733 void emit_op_call_put_result(Instruction*);
734 void emit_op_catch(Instruction*);
735 void emit_op_construct(Instruction*);
736 void emit_op_get_callee(Instruction*);
737 void emit_op_create_this(Instruction*);
738 void emit_op_convert_this(Instruction*);
739 void emit_op_convert_this_strict(Instruction*);
740 void emit_op_create_arguments(Instruction*);
741 void emit_op_debug(Instruction*);
742 void emit_op_del_by_id(Instruction*);
743 void emit_op_div(Instruction*);
744 void emit_op_end(Instruction*);
745 void emit_op_enter(Instruction*);
746 void emit_op_create_activation(Instruction*);
747 void emit_op_eq(Instruction*);
748 void emit_op_eq_null(Instruction*);
749 void emit_op_get_by_id(Instruction*);
750 void emit_op_get_arguments_length(Instruction*);
751 void emit_op_get_by_val(Instruction*);
752 void emit_op_get_argument_by_val(Instruction*);
753 void emit_op_get_by_pname(Instruction*);
754 void emit_op_get_global_var(Instruction*);
755 void emit_op_get_scoped_var(Instruction*);
756 void emit_op_init_lazy_reg(Instruction*);
757 void emit_op_check_has_instance(Instruction*);
758 void emit_op_instanceof(Instruction*);
759 void emit_op_jeq_null(Instruction*);
760 void emit_op_jfalse(Instruction*);
761 void emit_op_jmp(Instruction*);
762 void emit_op_jmp_scopes(Instruction*);
763 void emit_op_jneq_null(Instruction*);
764 void emit_op_jneq_ptr(Instruction*);
765 void emit_op_jnless(Instruction*);
766 void emit_op_jless(Instruction*);
767 void emit_op_jlesseq(Instruction*, bool invert = false);
768 void emit_op_jnlesseq(Instruction*);
769 void emit_op_jsr(Instruction*);
770 void emit_op_jtrue(Instruction*);
771 void emit_op_load_varargs(Instruction*);
772 void emit_op_loop(Instruction*);
773 void emit_op_loop_if_less(Instruction*);
774 void emit_op_loop_if_lesseq(Instruction*);
775 void emit_op_loop_if_true(Instruction*);
776 void emit_op_loop_if_false(Instruction*);
777 void emit_op_lshift(Instruction*);
778 void emit_op_method_check(Instruction*);
779 void emit_op_mod(Instruction*);
780 void emit_op_mov(Instruction*);
781 void emit_op_mul(Instruction*);
782 void emit_op_negate(Instruction*);
783 void emit_op_neq(Instruction*);
784 void emit_op_neq_null(Instruction*);
785 void emit_op_new_array(Instruction*);
786 void emit_op_new_array_buffer(Instruction*);
787 void emit_op_new_func(Instruction*);
788 void emit_op_new_func_exp(Instruction*);
789 void emit_op_new_object(Instruction*);
790 void emit_op_new_regexp(Instruction*);
791 void emit_op_get_pnames(Instruction*);
792 void emit_op_next_pname(Instruction*);
793 void emit_op_not(Instruction*);
794 void emit_op_nstricteq(Instruction*);
795 void emit_op_pop_scope(Instruction*);
796 void emit_op_post_dec(Instruction*);
797 void emit_op_post_inc(Instruction*);
798 void emit_op_pre_dec(Instruction*);
799 void emit_op_pre_inc(Instruction*);
800 void emit_op_profile_did_call(Instruction*);
801 void emit_op_profile_will_call(Instruction*);
802 void emit_op_push_new_scope(Instruction*);
803 void emit_op_push_scope(Instruction*);
804 void emit_op_put_by_id(Instruction*);
805 void emit_op_put_by_index(Instruction*);
806 void emit_op_put_by_val(Instruction*);
807 void emit_op_put_getter(Instruction*);
808 void emit_op_put_global_var(Instruction*);
809 void emit_op_put_scoped_var(Instruction*);
810 void emit_op_put_setter(Instruction*);
811 void emit_op_resolve(Instruction*);
812 void emit_op_resolve_base(Instruction*);
813 void emit_op_ensure_property_exists(Instruction*);
814 void emit_op_resolve_global(Instruction*, bool dynamic = false);
815 void emit_op_resolve_global_dynamic(Instruction*);
816 void emit_op_resolve_skip(Instruction*);
817 void emit_op_resolve_with_base(Instruction*);
818 void emit_op_ret(Instruction*);
819 void emit_op_ret_object_or_this(Instruction*);
820 void emit_op_rshift(Instruction*);
821 void emit_op_sret(Instruction*);
822 void emit_op_strcat(Instruction*);
823 void emit_op_stricteq(Instruction*);
824 void emit_op_sub(Instruction*);
825 void emit_op_switch_char(Instruction*);
826 void emit_op_switch_imm(Instruction*);
827 void emit_op_switch_string(Instruction*);
828 void emit_op_tear_off_activation(Instruction*);
829 void emit_op_tear_off_arguments(Instruction*);
830 void emit_op_throw(Instruction*);
831 void emit_op_throw_reference_error(Instruction*);
832 void emit_op_to_jsnumber(Instruction*);
833 void emit_op_to_primitive(Instruction*);
834 void emit_op_unexpected_load(Instruction*);
835 void emit_op_urshift(Instruction*);
836 #if ENABLE(JIT_USE_SOFT_MODULO)
837 void softModulo();
838 #endif
839
840 void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
841 void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
842 void emitSlow_op_bitnot(Instruction*, Vector<SlowCaseEntry>::iterator&);
843 void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
844 void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
845 void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
846 void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
847 void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
848 void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
849 void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
850 void emitSlow_op_convert_this_strict(Instruction*, Vector<SlowCaseEntry>::iterator&);
851 void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
852 void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
853 void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
854 void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
855 void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
856 void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
857 void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
858 void emitSlow_op_check_has_instance(Instruction*, Vector<SlowCaseEntry>::iterator&);
859 void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
860 void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
861 void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
862 void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
863 void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&, bool invert = false);
864 void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
865 void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
866 void emitSlow_op_load_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
867 void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
868 void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
869 void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
870 void emitSlow_op_loop_if_false(Instruction*, Vector<SlowCaseEntry>::iterator&);
871 void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
872 void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
873 void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
874 void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
875 void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
876 void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
877 void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
878 void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
879 void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
880 void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
881 void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
882 void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
883 void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
884 void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
885 void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
886 void emitSlow_op_resolve_global_dynamic(Instruction*, Vector<SlowCaseEntry>::iterator&);
887 void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
888 void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
889 void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
890 void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
891 void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
892 void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
893
894
895 void emitRightShift(Instruction*, bool isUnsigned);
896 void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);
897
898 /* This function is deprecated. */
899 void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);
900
901 void emitInitRegister(unsigned dst);
902
903 void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
904 void emitPutCellToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
905 void emitPutIntToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
906 void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
907 void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
908 void emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
909
910 JSValue getConstantOperand(unsigned src);
911 bool isOperandConstantImmediateInt(unsigned src);
912 bool isOperandConstantImmediateChar(unsigned src);
913
914 bool atJumpTarget();
915
916 Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
917 {
918 return iter++->from;
919 }
920 void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
921 {
922 iter->from.link(this);
923 ++iter;
924 }
925 void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int vReg);
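// Sketch of the slow-case linking pattern (assumed, mirroring the emitSlow_op_* bodies):
//     void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
//     {
//         linkSlowCase(iter); // bind the jump taken when the fast path bailed out
//         JITStubCall stubCall(this, cti_op_not);
//         stubCall.addArgument(regT0);
//         stubCall.call(currentInstruction[1].u.operand);
//     }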
926
927 Jump checkStructure(RegisterID reg, Structure* structure);
928
929 void restoreArgumentReference();
930 void restoreArgumentReferenceForTrampoline();
931
932 Call emitNakedCall(CodePtr function = CodePtr());
933
934 void preserveReturnAddressAfterCall(RegisterID);
935 void restoreReturnAddressBeforeReturn(RegisterID);
936 void restoreReturnAddressBeforeReturn(Address);
937
938 // Loads the character value of a single character string into dst.
939 void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);
940
941 void emitTimeoutCheck();
942 #ifndef NDEBUG
943 void printBytecodeOperandTypes(unsigned src1, unsigned src2);
944 #endif
945
946 #if ENABLE(SAMPLING_FLAGS)
947 void setSamplingFlag(int32_t);
948 void clearSamplingFlag(int32_t);
949 #endif
950
951 #if ENABLE(SAMPLING_COUNTERS)
952 void emitCount(AbstractSamplingCounter&, uint32_t = 1);
953 #endif
954
955 #if ENABLE(OPCODE_SAMPLING)
956 void sampleInstruction(Instruction*, bool = false);
957 #endif
958
959 #if ENABLE(CODEBLOCK_SAMPLING)
960 void sampleCodeBlock(CodeBlock*);
961 #else
962 void sampleCodeBlock(CodeBlock*) {}
963 #endif
964
965 Interpreter* m_interpreter;
966 JSGlobalData* m_globalData;
967 CodeBlock* m_codeBlock;
968
969 Vector<CallRecord> m_calls;
970 Vector<Label> m_labels;
971 Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
972 Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
973 Vector<MethodCallCompilationInfo> m_methodCallCompilationInfo;
974 Vector<JumpTable> m_jmpTable;
975
976 unsigned m_bytecodeOffset;
977 Vector<JSRInfo> m_jsrSites;
978 Vector<SlowCaseEntry> m_slowCases;
979 Vector<SwitchRecord> m_switches;
980
981 unsigned m_propertyAccessInstructionIndex;
982 unsigned m_globalResolveInfoIndex;
983 unsigned m_callLinkInfoIndex;
984
985 #if USE(JSVALUE32_64)
986 unsigned m_jumpTargetIndex;
987 unsigned m_mappedBytecodeOffset;
988 unsigned m_mappedVirtualRegisterIndex;
989 RegisterID m_mappedTag;
990 RegisterID m_mappedPayload;
991 #else
992 int m_lastResultBytecodeRegister;
993 #endif
994 unsigned m_jumpTargetsPosition;
995
996 #ifndef NDEBUG
997 #if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
998 Label m_uninterruptedInstructionSequenceBegin;
999 int m_uninterruptedConstantSequenceBegin;
1000 #endif
1001 #endif
1002 WeakRandom m_randomGenerator;
1003 static CodePtr stringGetByValStubGenerator(JSGlobalData* globalData, ExecutablePool* pool);
1004 } JIT_CLASS_ALIGNMENT;
1005
1006 inline void JIT::emit_op_loop(Instruction* currentInstruction)
1007 {
1008 emitTimeoutCheck();
1009 emit_op_jmp(currentInstruction);
1010 }
1011
1012 inline void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
1013 {
1014 emitTimeoutCheck();
1015 emit_op_jtrue(currentInstruction);
1016 }
1017
1018 inline void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1019 {
1020 emitSlow_op_jtrue(currentInstruction, iter);
1021 }
1022
1023 inline void JIT::emit_op_loop_if_false(Instruction* currentInstruction)
1024 {
1025 emitTimeoutCheck();
1026 emit_op_jfalse(currentInstruction);
1027 }
1028
1029 inline void JIT::emitSlow_op_loop_if_false(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1030 {
1031 emitSlow_op_jfalse(currentInstruction, iter);
1032 }
1033
1034 inline void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
1035 {
1036 emitTimeoutCheck();
1037 emit_op_jless(currentInstruction);
1038 }
1039
1040 inline void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1041 {
1042 emitSlow_op_jless(currentInstruction, iter);
1043 }
1044
1045 } // namespace JSC
1046
1047 #endif // ENABLE(JIT)
1048
1049 #endif // JIT_h