/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#if ENABLE(JIT)

// We've run into some problems where changing the size of the class JIT leads to
// performance fluctuations. Try forcing alignment in an attempt to stabilize this.
#if COMPILER(GCC)
#define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
#else
#define JIT_CLASS_ALIGNMENT
#endif

#define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(actual), static_cast<int>(expected));
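// Notes on the two helpers above (informational comments, not from the original header):
// JIT_CLASS_ALIGNMENT is applied at the closing brace of class JIT below ("} JIT_CLASS_ALIGNMENT;"),
// so the whole object gets 32-byte alignment when the compiler supports the attribute.
// ASSERT_JIT_OFFSET is a debug cross-check between a measured offset and one of the per-CPU
// patchOffset* constants declared in the class, used roughly like:
//     ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureCheck), patchOffsetGetByIdStructure);
// The exact call sites live in the JIT implementation files; the line above is only a sketch.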
#include "CodeBlock.h"
#include "Interpreter.h"
#include "JSInterfaceJIT.h"
#include <bytecode/SamplingTool.h>
    class JSPropertyNameIterator;

    struct PolymorphicAccessStructureList;
    struct SimpleJumpTable;
    struct StringJumpTable;
    struct StructureStubInfo;
    struct CallRecord {
        MacroAssembler::Call from;
        unsigned bytecodeIndex;
        void* to;

        CallRecord(MacroAssembler::Call from, unsigned bytecodeIndex, void* to = 0)
            : from(from)
            , bytecodeIndex(bytecodeIndex)
            , to(to)
        {
        }
    };
    struct JumpTable {
        MacroAssembler::Jump from;
        unsigned toBytecodeIndex;

        JumpTable(MacroAssembler::Jump f, unsigned t)
            : from(f)
            , toBytecodeIndex(t)
        {
        }
    };
    struct SlowCaseEntry {
        MacroAssembler::Jump from;
        unsigned to;
        unsigned hint;

        SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
            : from(f)
            , to(t)
            , hint(h)
        {
        }
    };
    struct SwitchRecord {
        enum Type {
            Immediate,
            Character,
            String
        };

        Type type;

        union {
            SimpleJumpTable* simpleJumpTable;
            StringJumpTable* stringJumpTable;
        } jumpTable;

        unsigned bytecodeIndex;
        unsigned defaultOffset;

        SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeIndex, unsigned defaultOffset, Type type)
            : type(type)
            , bytecodeIndex(bytecodeIndex)
            , defaultOffset(defaultOffset)
        {
            this->jumpTable.simpleJumpTable = jumpTable;
        }

        SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeIndex, unsigned defaultOffset)
            : type(String)
            , bytecodeIndex(bytecodeIndex)
            , defaultOffset(defaultOffset)
        {
            this->jumpTable.stringJumpTable = jumpTable;
        }
    };
    struct PropertyStubCompilationInfo {
        MacroAssembler::Call callReturnLocation;
        MacroAssembler::Label hotPathBegin;
    };

    struct StructureStubCompilationInfo {
        MacroAssembler::DataLabelPtr hotPathBegin;
        MacroAssembler::Call hotPathOther;
        MacroAssembler::Call callReturnLocation;
    };

    struct MethodCallCompilationInfo {
        MethodCallCompilationInfo(unsigned propertyAccessIndex)
            : propertyAccessIndex(propertyAccessIndex)
        {
        }

        MacroAssembler::DataLabelPtr structureToCompare;
        unsigned propertyAccessIndex;
    };
    // Near calls can only be patched to other JIT code; regular calls can be patched to JIT code or relinked to stub functions.
    void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
    void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
    void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
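    // Hedged usage sketch (not part of the original header): a cached get_by_id that has to give up
    // on inline caching can relink its slow-path call back to a generic C stub through the return
    // address recorded for that call site, roughly:
    //     ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
    // cti_op_get_by_id_generic is named here for illustration; the real call sites are in JITStubs.cpp.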
    class JIT : private JSInterfaceJIT {
        friend class JITStubCall;

        using MacroAssembler::Jump;
        using MacroAssembler::JumpList;
        using MacroAssembler::Label;

        static const int patchGetByIdDefaultStructure = -1;
        // Magic number - the initial offset must not be representable as a signed 8-bit value, or the
        // X86Assembler will compress the displacement and we may not be able to fit a patched offset.
        static const int patchGetByIdDefaultOffset = 256;
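        // Why 256 (informational, hedged): x86 encodes memory displacements as either 1 byte or 4 bytes.
        // Emitting the placeholder load as, say, loadPtr(Address(base, 256), dest) forces the 4-byte form,
        // leaving room to repatch in any real property offset later; a small placeholder such as 4 could
        // be encoded in a single byte and the patched offset might not fit. The loadPtr call above is a
        // sketch of the idea, not a quote from this file.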
    public:
        static JITCode compile(JSGlobalData* globalData, CodeBlock* codeBlock)
        {
            return JIT(globalData, codeBlock).privateCompile();
        }
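        // Hedged note: callers outside this class are expected to go through this static entry point,
        // along the lines of
        //     JITCode code = JIT::compile(globalData, codeBlock);
        // (the real call sites are in the Executable/CodeBlock machinery, not shown in this header).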
        static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
        }

        static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
        }

        static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
        }

        static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
        }

        static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
        }

        static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
        }
        static void compileCTIMachineTrampolines(JSGlobalData* globalData, RefPtr<ExecutablePool>* executablePool, TrampolineStructure* trampolines)
        {
            if (!globalData->canUseJIT())
                return;
            JIT jit(globalData, 0);
            jit.privateCompileCTIMachineTrampolines(executablePool, globalData, trampolines);
        }
        static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
        static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
        static void patchMethodCallProto(CodeBlock* codeblock, MethodCallLinkInfo&, JSFunction*, Structure*, JSObject*, ReturnAddressPtr);

        static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
        {
            JIT jit(globalData, codeBlock);
            return jit.privateCompilePatchGetArrayLength(returnAddress);
        }

        static void linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JITCode&, CallLinkInfo*, int callerArgCount, JSGlobalData*);
        static void unlinkCall(CallLinkInfo*);
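        // Hedged sketch of how the patch entry points use the per-CPU patchOffset* constants declared
        // further down: they locate fields of previously emitted code relative to the stub's recorded
        // hot-path label and overwrite them in place, roughly
        //     RepatchBuffer repatchBuffer(codeBlock);
        //     repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), structure);
        // RepatchBuffer and dataLabelPtrAtOffset are assumed from the assembler layer of this era; the
        // real implementations live in JITPropertyAccess.cpp, not in this header.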
    private:
        struct JSRInfo {
            DataLabelPtr storeLocation;
            Label target;

            JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
                : storeLocation(storeLocation)
                , target(targetLocation)
            {
            }
        };

        JIT(JSGlobalData*, CodeBlock* = 0);
        void privateCompileMainPass();
        void privateCompileLinkPass();
        void privateCompileSlowCases();
        JITCode privateCompile();
        void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
        void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, size_t cachedOffset);
        void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
        void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
        void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
        void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress, bool direct);

        void privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* data, TrampolineStructure* trampolines);
        void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
        void addSlowCase(Jump);
        void addSlowCase(JumpList);
        void addJump(Jump, int);
        void emitJumpSlowToHot(Jump, int);

        void compileOpCall(OpcodeID, Instruction* instruction, unsigned callLinkInfoIndex);
        void compileOpCallVarargs(Instruction* instruction);
        void compileOpCallInitializeCallFrame();
        void compileOpCallSetupArgs(Instruction*);
        void compileOpCallVarargsSetupArgs(Instruction*);
        void compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID);
        void compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter);
        void compileOpConstructSetupArgs(Instruction*);

        enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
        void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
        bool isOperandConstantImmediateDouble(unsigned src);

        void emitLoadDouble(unsigned index, FPRegisterID value);
        void emitLoadInt32ToDouble(unsigned index, FPRegisterID value);

        void testPrototype(Structure*, JumpList& failureCases);
#if USE(JSVALUE32_64)
        bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);

        void emitLoadTag(unsigned index, RegisterID tag);
        void emitLoadPayload(unsigned index, RegisterID payload);

        void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
        void emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
        void emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2);

        void emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
        void emitStore(unsigned index, const JSValue constant, RegisterID base = callFrameRegister);
        void emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32 = false);
        void emitStoreInt32(unsigned index, Imm32 payload, bool indexIsInt32 = false);
        void emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell = false);
        void emitStoreBool(unsigned index, RegisterID tag, bool indexIsBool = false);
        void emitStoreDouble(unsigned index, FPRegisterID value);

        bool isLabeled(unsigned bytecodeIndex);
        void map(unsigned bytecodeIndex, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload);
        void unmap(RegisterID);
        bool isMapped(unsigned virtualRegisterIndex);
        bool getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload);
        bool getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag);

        void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex);
        void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag);
        void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, unsigned virtualRegisterIndex);
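        // Informational note (not from the original header): under USE(JSVALUE32_64) a JSValue is kept as
        // a 32-bit tag plus a 32-bit payload, so the load/store helpers above move both halves of a
        // virtual register, and map()/getMappedPayload()/getMappedTag() cache which machine registers
        // currently hold a value so redundant reloads within a bytecode can be skipped.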
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
        void compileGetByIdHotPath();
        void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
#endif
        void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, Structure* structure, size_t cachedOffset);
        void compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
        void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID structure, RegisterID offset);
        void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, Structure* structure, size_t cachedOffset);
        // Arithmetic opcode helpers
        void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
        void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
        void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
#if CPU(X86)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 7;
        static const int patchOffsetPutByIdExternalLoad = 13;
        static const int patchLengthPutByIdExternalLoad = 3;
        static const int patchOffsetPutByIdPropertyMapOffset1 = 22;
        static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 7;
        static const int patchOffsetGetByIdBranchToSlowCase = 13;
        static const int patchOffsetGetByIdExternalLoad = 13;
        static const int patchLengthGetByIdExternalLoad = 3;
        static const int patchOffsetGetByIdPropertyMapOffset1 = 22;
        static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
        static const int patchOffsetGetByIdPutResult = 28;
#if ENABLE(OPCODE_SAMPLING) && USE(JIT_STUB_ARGUMENT_VA_LIST)
        static const int patchOffsetGetByIdSlowCaseCall = 35;
#elif ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 37;
#elif USE(JIT_STUB_ARGUMENT_VA_LIST)
        static const int patchOffsetGetByIdSlowCaseCall = 25;
#else
        static const int patchOffsetGetByIdSlowCaseCall = 27;
#endif
        static const int patchOffsetOpCallCompareToJump = 6;

        static const int patchOffsetMethodCheckProtoObj = 11;
        static const int patchOffsetMethodCheckProtoStruct = 18;
        static const int patchOffsetMethodCheckPutFunction = 29;
#elif CPU(ARM_TRADITIONAL)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 4;
        static const int patchOffsetPutByIdExternalLoad = 16;
        static const int patchLengthPutByIdExternalLoad = 4;
        static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
        static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 4;
        static const int patchOffsetGetByIdBranchToSlowCase = 16;
        static const int patchOffsetGetByIdExternalLoad = 16;
        static const int patchLengthGetByIdExternalLoad = 4;
        static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
        static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
        static const int patchOffsetGetByIdPutResult = 36;
#if ENABLE(OPCODE_SAMPLING)
        #error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 32;
#endif
        static const int patchOffsetOpCallCompareToJump = 12;

        static const int patchOffsetMethodCheckProtoObj = 12;
        static const int patchOffsetMethodCheckProtoStruct = 20;
        static const int patchOffsetMethodCheckPutFunction = 32;

        // sequenceOpCall
        static const int sequenceOpCallInstructionSpace = 12;
        static const int sequenceOpCallConstantSpace = 2;
        // sequenceMethodCheck
        static const int sequenceMethodCheckInstructionSpace = 40;
        static const int sequenceMethodCheckConstantSpace = 6;
        // sequenceGetByIdHotPath
        static const int sequenceGetByIdHotPathInstructionSpace = 36;
        static const int sequenceGetByIdHotPathConstantSpace = 4;
        // sequenceGetByIdSlowCase
        static const int sequenceGetByIdSlowCaseInstructionSpace = 40;
        static const int sequenceGetByIdSlowCaseConstantSpace = 2;
        // sequencePutById
        static const int sequencePutByIdInstructionSpace = 36;
        static const int sequencePutByIdConstantSpace = 4;
#elif CPU(ARM_THUMB2)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 10;
        static const int patchOffsetPutByIdExternalLoad = 26;
        static const int patchLengthPutByIdExternalLoad = 12;
        static const int patchOffsetPutByIdPropertyMapOffset1 = 46;
        static const int patchOffsetPutByIdPropertyMapOffset2 = 58;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 10;
        static const int patchOffsetGetByIdBranchToSlowCase = 26;
        static const int patchOffsetGetByIdExternalLoad = 26;
        static const int patchLengthGetByIdExternalLoad = 12;
        static const int patchOffsetGetByIdPropertyMapOffset1 = 46;
        static const int patchOffsetGetByIdPropertyMapOffset2 = 58;
        static const int patchOffsetGetByIdPutResult = 62;
#if ENABLE(OPCODE_SAMPLING)
        #error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 30;
#endif
        static const int patchOffsetOpCallCompareToJump = 16;

        static const int patchOffsetMethodCheckProtoObj = 24;
        static const int patchOffsetMethodCheckProtoStruct = 34;
        static const int patchOffsetMethodCheckPutFunction = 58;

        // sequenceOpCall
        static const int sequenceOpCallInstructionSpace = 12;
        static const int sequenceOpCallConstantSpace = 2;
        // sequenceMethodCheck
        static const int sequenceMethodCheckInstructionSpace = 40;
        static const int sequenceMethodCheckConstantSpace = 6;
        // sequenceGetByIdHotPath
        static const int sequenceGetByIdHotPathInstructionSpace = 36;
        static const int sequenceGetByIdHotPathConstantSpace = 4;
        // sequenceGetByIdSlowCase
        static const int sequenceGetByIdSlowCaseInstructionSpace = 40;
        static const int sequenceGetByIdSlowCaseConstantSpace = 2;
        // sequencePutById
        static const int sequencePutByIdInstructionSpace = 36;
        static const int sequencePutByIdConstantSpace = 4;
#else
#error "JSVALUE32_64 not supported on this platform."
#endif
#else // USE(JSVALUE32_64)
        void emitGetVirtualRegister(int src, RegisterID dst);
        void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
        void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);

        int32_t getConstantOperandImmediateInt(unsigned src);

        void emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst);
        void emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index);

        void killLastResultRegister();

        Jump emitJumpIfJSCell(RegisterID);
        Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
        void emitJumpSlowCaseIfJSCell(RegisterID);
        Jump emitJumpIfNotJSCell(RegisterID);
        void emitJumpSlowCaseIfNotJSCell(RegisterID);
        void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);

        JIT::Jump emitJumpIfImmediateNumber(RegisterID reg)
        {
            return emitJumpIfImmediateInteger(reg);
        }

        JIT::Jump emitJumpIfNotImmediateNumber(RegisterID reg)
        {
            return emitJumpIfNotImmediateInteger(reg);
        }

        JIT::Jump emitJumpIfImmediateInteger(RegisterID);
        JIT::Jump emitJumpIfNotImmediateInteger(RegisterID);
        JIT::Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
        void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
        void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
        void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);

        void emitFastArithDeTagImmediate(RegisterID);
        Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
        void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
        void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);

        void emitTagAsBoolImmediate(RegisterID reg);
        void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
        void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
        void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
        void compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier* ident, unsigned propertyAccessInstructionIndex);
        void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
#endif
        void compileGetDirectOffset(RegisterID base, RegisterID result, Structure* structure, size_t cachedOffset);
        void compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID result, size_t cachedOffset);
        void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID structure, RegisterID offset, RegisterID scratch);
        void compilePutDirectOffset(RegisterID base, RegisterID value, Structure* structure, size_t cachedOffset);
#if CPU(X86_64)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 10;
        static const int patchOffsetPutByIdExternalLoad = 20;
        static const int patchLengthPutByIdExternalLoad = 4;
        static const int patchOffsetPutByIdPropertyMapOffset = 31;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 10;
        static const int patchOffsetGetByIdBranchToSlowCase = 20;
        static const int patchOffsetGetByIdExternalLoad = 20;
        static const int patchLengthGetByIdExternalLoad = 4;
        static const int patchOffsetGetByIdPropertyMapOffset = 31;
        static const int patchOffsetGetByIdPutResult = 31;
#if ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 64;
#else
        static const int patchOffsetGetByIdSlowCaseCall = 41;
#endif
        static const int patchOffsetOpCallCompareToJump = 9;

        static const int patchOffsetMethodCheckProtoObj = 20;
        static const int patchOffsetMethodCheckProtoStruct = 30;
        static const int patchOffsetMethodCheckPutFunction = 50;
#elif CPU(X86)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 7;
        static const int patchOffsetPutByIdExternalLoad = 13;
        static const int patchLengthPutByIdExternalLoad = 3;
        static const int patchOffsetPutByIdPropertyMapOffset = 22;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 7;
        static const int patchOffsetGetByIdBranchToSlowCase = 13;
        static const int patchOffsetGetByIdExternalLoad = 13;
        static const int patchLengthGetByIdExternalLoad = 3;
        static const int patchOffsetGetByIdPropertyMapOffset = 22;
        static const int patchOffsetGetByIdPutResult = 22;
#if ENABLE(OPCODE_SAMPLING) && USE(JIT_STUB_ARGUMENT_VA_LIST)
        static const int patchOffsetGetByIdSlowCaseCall = 31;
#elif ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 33;
#elif USE(JIT_STUB_ARGUMENT_VA_LIST)
        static const int patchOffsetGetByIdSlowCaseCall = 21;
#else
        static const int patchOffsetGetByIdSlowCaseCall = 23;
#endif
        static const int patchOffsetOpCallCompareToJump = 6;

        static const int patchOffsetMethodCheckProtoObj = 11;
        static const int patchOffsetMethodCheckProtoStruct = 18;
        static const int patchOffsetMethodCheckPutFunction = 29;
#elif CPU(ARM_THUMB2)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 10;
        static const int patchOffsetPutByIdExternalLoad = 26;
        static const int patchLengthPutByIdExternalLoad = 12;
        static const int patchOffsetPutByIdPropertyMapOffset = 46;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 10;
        static const int patchOffsetGetByIdBranchToSlowCase = 26;
        static const int patchOffsetGetByIdExternalLoad = 26;
        static const int patchLengthGetByIdExternalLoad = 12;
        static const int patchOffsetGetByIdPropertyMapOffset = 46;
        static const int patchOffsetGetByIdPutResult = 50;
#if ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 0; // FIXME
#else
        static const int patchOffsetGetByIdSlowCaseCall = 28;
#endif
        static const int patchOffsetOpCallCompareToJump = 16;

        static const int patchOffsetMethodCheckProtoObj = 24;
        static const int patchOffsetMethodCheckProtoStruct = 34;
        static const int patchOffsetMethodCheckPutFunction = 58;
#elif CPU(ARM_TRADITIONAL)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 4;
        static const int patchOffsetPutByIdExternalLoad = 16;
        static const int patchLengthPutByIdExternalLoad = 4;
        static const int patchOffsetPutByIdPropertyMapOffset = 20;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 4;
        static const int patchOffsetGetByIdBranchToSlowCase = 16;
        static const int patchOffsetGetByIdExternalLoad = 16;
        static const int patchLengthGetByIdExternalLoad = 4;
        static const int patchOffsetGetByIdPropertyMapOffset = 20;
        static const int patchOffsetGetByIdPutResult = 28;
#if ENABLE(OPCODE_SAMPLING)
        #error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 28;
#endif
        static const int patchOffsetOpCallCompareToJump = 12;

        static const int patchOffsetMethodCheckProtoObj = 12;
        static const int patchOffsetMethodCheckProtoStruct = 20;
        static const int patchOffsetMethodCheckPutFunction = 32;

        // sequenceOpCall
        static const int sequenceOpCallInstructionSpace = 12;
        static const int sequenceOpCallConstantSpace = 2;
        // sequenceMethodCheck
        static const int sequenceMethodCheckInstructionSpace = 40;
        static const int sequenceMethodCheckConstantSpace = 6;
        // sequenceGetByIdHotPath
        static const int sequenceGetByIdHotPathInstructionSpace = 28;
        static const int sequenceGetByIdHotPathConstantSpace = 3;
        // sequenceGetByIdSlowCase
        static const int sequenceGetByIdSlowCaseInstructionSpace = 32;
        static const int sequenceGetByIdSlowCaseConstantSpace = 2;
        // sequencePutById
        static const int sequencePutByIdInstructionSpace = 28;
        static const int sequencePutByIdConstantSpace = 3;
#elif CPU(MIPS)
#if WTF_MIPS_ISA(1)
        static const int patchOffsetPutByIdStructure = 16;
        static const int patchOffsetPutByIdExternalLoad = 48;
        static const int patchLengthPutByIdExternalLoad = 20;
        static const int patchOffsetPutByIdPropertyMapOffset = 68;
        static const int patchOffsetGetByIdStructure = 16;
        static const int patchOffsetGetByIdBranchToSlowCase = 48;
        static const int patchOffsetGetByIdExternalLoad = 48;
        static const int patchLengthGetByIdExternalLoad = 20;
        static const int patchOffsetGetByIdPropertyMapOffset = 68;
        static const int patchOffsetGetByIdPutResult = 88;
#if ENABLE(OPCODE_SAMPLING)
        #error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 40;
#endif
        static const int patchOffsetOpCallCompareToJump = 32;
        static const int patchOffsetMethodCheckProtoObj = 32;
        static const int patchOffsetMethodCheckProtoStruct = 56;
        static const int patchOffsetMethodCheckPutFunction = 88;
#else // WTF_MIPS_ISA(1)
        static const int patchOffsetPutByIdStructure = 12;
        static const int patchOffsetPutByIdExternalLoad = 44;
        static const int patchLengthPutByIdExternalLoad = 16;
        static const int patchOffsetPutByIdPropertyMapOffset = 60;
        static const int patchOffsetGetByIdStructure = 12;
        static const int patchOffsetGetByIdBranchToSlowCase = 44;
        static const int patchOffsetGetByIdExternalLoad = 44;
        static const int patchLengthGetByIdExternalLoad = 16;
        static const int patchOffsetGetByIdPropertyMapOffset = 60;
        static const int patchOffsetGetByIdPutResult = 76;
#if ENABLE(OPCODE_SAMPLING)
        #error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 40;
#endif
        static const int patchOffsetOpCallCompareToJump = 32;
        static const int patchOffsetMethodCheckProtoObj = 32;
        static const int patchOffsetMethodCheckProtoStruct = 52;
        static const int patchOffsetMethodCheckPutFunction = 84;
#endif // WTF_MIPS_ISA(1)
#endif
#endif // USE(JSVALUE32_64)

#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
#define BEGIN_UNINTERRUPTED_SEQUENCE(name) beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace)
#define END_UNINTERRUPTED_SEQUENCE(name) endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace)

        void beginUninterruptedSequence(int, int);
        void endUninterruptedSequence(int, int);

#else
#define BEGIN_UNINTERRUPTED_SEQUENCE(name)
#define END_UNINTERRUPTED_SEQUENCE(name)
#endif
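        // Usage sketch (informational, hedged): on constant-pool assemblers, code whose patch offsets
        // must stay fixed is emitted inside one of these brackets so the pool cannot be flushed
        // mid-sequence, roughly:
        //     BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
        //     ... emit the get_by_id hot path ...
        //     END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
        // The sequence*InstructionSpace/ConstantSpace constants above reserve the space each region needs.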
        void emit_op_add(Instruction*);
        void emit_op_bitand(Instruction*);
        void emit_op_bitnot(Instruction*);
        void emit_op_bitor(Instruction*);
        void emit_op_bitxor(Instruction*);
        void emit_op_call(Instruction*);
        void emit_op_call_eval(Instruction*);
        void emit_op_call_varargs(Instruction*);
        void emit_op_catch(Instruction*);
        void emit_op_construct(Instruction*);
        void emit_op_construct_verify(Instruction*);
        void emit_op_convert_this(Instruction*);
        void emit_op_create_arguments(Instruction*);
        void emit_op_debug(Instruction*);
        void emit_op_del_by_id(Instruction*);
        void emit_op_div(Instruction*);
        void emit_op_end(Instruction*);
        void emit_op_enter(Instruction*);
        void emit_op_enter_with_activation(Instruction*);
        void emit_op_eq(Instruction*);
        void emit_op_eq_null(Instruction*);
        void emit_op_get_by_id(Instruction*);
        void emit_op_get_by_val(Instruction*);
        void emit_op_get_by_pname(Instruction*);
        void emit_op_get_global_var(Instruction*);
        void emit_op_get_scoped_var(Instruction*);
        void emit_op_init_arguments(Instruction*);
        void emit_op_instanceof(Instruction*);
        void emit_op_jeq_null(Instruction*);
        void emit_op_jfalse(Instruction*);
        void emit_op_jmp(Instruction*);
        void emit_op_jmp_scopes(Instruction*);
        void emit_op_jneq_null(Instruction*);
        void emit_op_jneq_ptr(Instruction*);
        void emit_op_jnless(Instruction*);
        void emit_op_jless(Instruction*);
        void emit_op_jlesseq(Instruction*, bool invert = false);
        void emit_op_jnlesseq(Instruction*);
        void emit_op_jsr(Instruction*);
        void emit_op_jtrue(Instruction*);
        void emit_op_load_varargs(Instruction*);
        void emit_op_loop(Instruction*);
        void emit_op_loop_if_less(Instruction*);
        void emit_op_loop_if_lesseq(Instruction*);
        void emit_op_loop_if_true(Instruction*);
        void emit_op_loop_if_false(Instruction*);
        void emit_op_lshift(Instruction*);
        void emit_op_method_check(Instruction*);
        void emit_op_mod(Instruction*);
        void emit_op_mov(Instruction*);
        void emit_op_mul(Instruction*);
        void emit_op_negate(Instruction*);
        void emit_op_neq(Instruction*);
        void emit_op_neq_null(Instruction*);
        void emit_op_new_array(Instruction*);
        void emit_op_new_error(Instruction*);
        void emit_op_new_func(Instruction*);
        void emit_op_new_func_exp(Instruction*);
        void emit_op_new_object(Instruction*);
        void emit_op_new_regexp(Instruction*);
        void emit_op_get_pnames(Instruction*);
        void emit_op_next_pname(Instruction*);
        void emit_op_not(Instruction*);
        void emit_op_nstricteq(Instruction*);
        void emit_op_pop_scope(Instruction*);
        void emit_op_post_dec(Instruction*);
        void emit_op_post_inc(Instruction*);
        void emit_op_pre_dec(Instruction*);
        void emit_op_pre_inc(Instruction*);
        void emit_op_profile_did_call(Instruction*);
        void emit_op_profile_will_call(Instruction*);
        void emit_op_push_new_scope(Instruction*);
        void emit_op_push_scope(Instruction*);
        void emit_op_put_by_id(Instruction*);
        void emit_op_put_by_index(Instruction*);
        void emit_op_put_by_val(Instruction*);
        void emit_op_put_getter(Instruction*);
        void emit_op_put_global_var(Instruction*);
        void emit_op_put_scoped_var(Instruction*);
        void emit_op_put_setter(Instruction*);
        void emit_op_resolve(Instruction*);
        void emit_op_resolve_base(Instruction*);
        void emit_op_resolve_global(Instruction*, bool dynamic = false);
        void emit_op_resolve_global_dynamic(Instruction*);
        void emit_op_resolve_skip(Instruction*);
        void emit_op_resolve_with_base(Instruction*);
        void emit_op_ret(Instruction*);
        void emit_op_rshift(Instruction*);
        void emit_op_sret(Instruction*);
        void emit_op_strcat(Instruction*);
        void emit_op_stricteq(Instruction*);
        void emit_op_sub(Instruction*);
        void emit_op_switch_char(Instruction*);
        void emit_op_switch_imm(Instruction*);
        void emit_op_switch_string(Instruction*);
        void emit_op_tear_off_activation(Instruction*);
        void emit_op_tear_off_arguments(Instruction*);
        void emit_op_throw(Instruction*);
        void emit_op_to_jsnumber(Instruction*);
        void emit_op_to_primitive(Instruction*);
        void emit_op_unexpected_load(Instruction*);
        void emit_op_urshift(Instruction*);
#if ENABLE(JIT_OPTIMIZE_MOD)
        void softModulo();
#endif
        void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitnot(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_construct_verify(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&, bool invert = false);
        void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_false(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_resolve_global_dynamic(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);

        void emitRightShift(Instruction*, bool isUnsigned);
        void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);
        /* These functions are deprecated: Please use JITStubCall instead. */
        void emitPutJITStubArg(RegisterID src, unsigned argumentNumber);
#if USE(JSVALUE32_64)
        void emitPutJITStubArg(RegisterID tag, RegisterID payload, unsigned argumentNumber);
        void emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch1, RegisterID scratch2);
#else
        void emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch);
#endif
        void emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber);
        void emitPutJITStubArgConstant(void* value, unsigned argumentNumber);
        void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);
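        // Hedged sketch of the preferred replacement (the real interface is in JITStubCall.h, not here):
        //     JITStubCall stubCall(this, cti_op_add);
        //     stubCall.addArgument(currentInstruction[2].u.operand, regT2);
        //     stubCall.addArgument(currentInstruction[3].u.operand, regT2);
        //     stubCall.call(currentInstruction[1].u.operand);
        // cti_op_add and the addArgument/call signatures are assumptions about the era's helper, shown
        // only to illustrate what the deprecated emitPutJITStubArg* routines are being replaced by.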
        void emitInitRegister(unsigned dst);

        void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
        void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
        void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
        void emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
        JSValue getConstantOperand(unsigned src);
        bool isOperandConstantImmediateInt(unsigned src);
        bool isOperandConstantImmediateChar(unsigned src);

        Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
        {
            return iter++->from;
        }
        void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
        {
            iter->from.link(this);
            ++iter;
        }
        void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int vReg);
        Jump checkStructure(RegisterID reg, Structure* structure);

        void restoreArgumentReference();
        void restoreArgumentReferenceForTrampoline();

        Call emitNakedCall(CodePtr function = CodePtr());

        void preserveReturnAddressAfterCall(RegisterID);
        void restoreReturnAddressBeforeReturn(RegisterID);
        void restoreReturnAddressBeforeReturn(Address);

        // Loads the character value of a single character string into dst.
        void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);

        void emitTimeoutCheck();

        void printBytecodeOperandTypes(unsigned src1, unsigned src2);
#if ENABLE(SAMPLING_FLAGS)
        void setSamplingFlag(int32_t);
        void clearSamplingFlag(int32_t);
#endif

#if ENABLE(SAMPLING_COUNTERS)
        void emitCount(AbstractSamplingCounter&, uint32_t = 1);
#endif

#if ENABLE(OPCODE_SAMPLING)
        void sampleInstruction(Instruction*, bool = false);
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
        void sampleCodeBlock(CodeBlock*);
#else
        void sampleCodeBlock(CodeBlock*) {}
#endif
        Interpreter* m_interpreter;
        JSGlobalData* m_globalData;
        CodeBlock* m_codeBlock;

        Vector<CallRecord> m_calls;
        Vector<Label> m_labels;
        Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
        Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
        Vector<MethodCallCompilationInfo> m_methodCallCompilationInfo;
        Vector<JumpTable> m_jmpTable;

        unsigned m_bytecodeIndex;
        Vector<JSRInfo> m_jsrSites;
        Vector<SlowCaseEntry> m_slowCases;
        Vector<SwitchRecord> m_switches;

        unsigned m_propertyAccessInstructionIndex;
        unsigned m_globalResolveInfoIndex;
        unsigned m_callLinkInfoIndex;

#if USE(JSVALUE32_64)
        unsigned m_jumpTargetIndex;
        unsigned m_mappedBytecodeIndex;
        unsigned m_mappedVirtualRegisterIndex;
        RegisterID m_mappedTag;
        RegisterID m_mappedPayload;
#else
        int m_lastResultBytecodeRegister;
        unsigned m_jumpTargetsPosition;
#endif

#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
        Label m_uninterruptedInstructionSequenceBegin;
        int m_uninterruptedConstantSequenceBegin;
#endif
        static PassRefPtr<NativeExecutable> stringGetByValStubGenerator(JSGlobalData* globalData, ExecutablePool* pool);
    } JIT_CLASS_ALIGNMENT;
    inline void JIT::emit_op_loop(Instruction* currentInstruction)
    {
        emit_op_jmp(currentInstruction);
    }

    inline void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
    {
        emit_op_jtrue(currentInstruction);
    }

    inline void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    {
        emitSlow_op_jtrue(currentInstruction, iter);
    }

    inline void JIT::emit_op_loop_if_false(Instruction* currentInstruction)
    {
        emit_op_jfalse(currentInstruction);
    }

    inline void JIT::emitSlow_op_loop_if_false(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    {
        emitSlow_op_jfalse(currentInstruction, iter);
    }

    inline void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
    {
        emit_op_jless(currentInstruction);
    }

    inline void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    {
        emitSlow_op_jless(currentInstruction, iter);
    }
#endif // ENABLE(JIT)