/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef JIT_h
#define JIT_h

#include <wtf/Platform.h>
#include <bytecode/SamplingTool.h>

#if ENABLE(JIT)

#define WTF_USE_CTI_REPATCH_PIC 1

#include "Interpreter.h"
#include "RegisterFile.h"
#include "MacroAssembler.h"

#include <wtf/AlwaysInline.h>
#include <wtf/Vector.h>

#if PLATFORM(X86_64)
#define STUB_ARGS_offset 0x10
#else
#define STUB_ARGS_offset 0x0C
#endif

#define STUB_ARGS_code (STUB_ARGS_offset)
#define STUB_ARGS_registerFile (STUB_ARGS_offset + 1)
#define STUB_ARGS_callFrame (STUB_ARGS_offset + 2)
#define STUB_ARGS_exception (STUB_ARGS_offset + 3)
#define STUB_ARGS_profilerReference (STUB_ARGS_offset + 4)
#define STUB_ARGS_globalData (STUB_ARGS_offset + 5)

#define ARG_callFrame static_cast<CallFrame*>(ARGS[STUB_ARGS_callFrame])
#define ARG_registerFile static_cast<RegisterFile*>(ARGS[STUB_ARGS_registerFile])
#define ARG_exception static_cast<JSValuePtr*>(ARGS[STUB_ARGS_exception])
#define ARG_profilerReference static_cast<Profiler**>(ARGS[STUB_ARGS_profilerReference])
#define ARG_globalData static_cast<JSGlobalData*>(ARGS[STUB_ARGS_globalData])

#define ARG_setCallFrame(newCallFrame) (ARGS[STUB_ARGS_callFrame] = (newCallFrame))

#define ARG_src1 JSValuePtr::decode(static_cast<JSValueEncodedAsPointer*>(ARGS[1]))
#define ARG_src2 JSValuePtr::decode(static_cast<JSValueEncodedAsPointer*>(ARGS[2]))
#define ARG_src3 JSValuePtr::decode(static_cast<JSValueEncodedAsPointer*>(ARGS[3]))
#define ARG_src4 JSValuePtr::decode(static_cast<JSValueEncodedAsPointer*>(ARGS[4]))
#define ARG_src5 JSValuePtr::decode(static_cast<JSValueEncodedAsPointer*>(ARGS[5]))
#define ARG_id1 static_cast<Identifier*>(ARGS[1])
#define ARG_id2 static_cast<Identifier*>(ARGS[2])
#define ARG_id3 static_cast<Identifier*>(ARGS[3])
#define ARG_id4 static_cast<Identifier*>(ARGS[4])
#define ARG_int1 static_cast<int32_t>(reinterpret_cast<intptr_t>(ARGS[1]))
#define ARG_int2 static_cast<int32_t>(reinterpret_cast<intptr_t>(ARGS[2]))
#define ARG_int3 static_cast<int32_t>(reinterpret_cast<intptr_t>(ARGS[3]))
#define ARG_int4 static_cast<int32_t>(reinterpret_cast<intptr_t>(ARGS[4]))
#define ARG_int5 static_cast<int32_t>(reinterpret_cast<intptr_t>(ARGS[5]))
#define ARG_int6 static_cast<int32_t>(reinterpret_cast<intptr_t>(ARGS[6]))
#define ARG_func1 static_cast<FuncDeclNode*>(ARGS[1])
#define ARG_funcexp1 static_cast<FuncExprNode*>(ARGS[1])
#define ARG_regexp1 static_cast<RegExp*>(ARGS[1])
#define ARG_pni1 static_cast<JSPropertyNameIterator*>(ARGS[1])
#define ARG_returnAddress2 static_cast<void*>(ARGS[2])
#define ARG_codeBlock4 static_cast<CodeBlock*>(ARGS[4])

#define STUB_RETURN_ADDRESS_SLOT (ARGS[-1])
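
// Illustrative sketch, not part of the original header: a JIT stub receives the
// raw argument block and unpacks its operands through the ARG_* macros above.
// The stub name and operand layout below are hypothetical, shown only to make
// the macro conventions concrete:
//
//     JSValueEncodedAsPointer* JIT_STUB cti_op_example(STUB_ARGS)
//     {
//         CallFrame* callFrame = ARG_callFrame; // frame pointer stored by the trampoline
//         JSValuePtr src1 = ARG_src1;           // first operand, decoded from ARGS[1]
//         int32_t count = ARG_int2;             // second operand, reinterpreted as an int
//         return JSValuePtr::encode(src1);
//     }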

class JSPropertyNameIterator;
class SimpleJumpTable;
class StringJumpTable;

struct PolymorphicAccessStructureList;
struct StructureStubInfo;

typedef JSValueEncodedAsPointer* (JIT_STUB *CTIHelper_j)(STUB_ARGS);
typedef JSObject* (JIT_STUB *CTIHelper_o)(STUB_ARGS);
typedef JSPropertyNameIterator* (JIT_STUB *CTIHelper_p)(STUB_ARGS);
typedef void (JIT_STUB *CTIHelper_v)(STUB_ARGS);
typedef void* (JIT_STUB *CTIHelper_s)(STUB_ARGS);
typedef int (JIT_STUB *CTIHelper_b)(STUB_ARGS);
typedef VoidPtrPair (JIT_STUB *CTIHelper_2)(STUB_ARGS);
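
// Sketch of the naming convention (an observation, not text from the original
// header): each CTIHelper_* typedef classifies a stub entry point by what it
// returns - '_j' an encoded JSValue, '_o' a JSObject*, '_p' a property name
// iterator, '_v' nothing, '_s' a raw pointer, '_b' an int used as a boolean,
// '_2' a pair of pointers. A helper matching CTIHelper_j would be declared
// roughly as (hypothetical name):
//
//     JSValueEncodedAsPointer* JIT_STUB cti_op_example(STUB_ARGS);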

struct CallRecord {
    MacroAssembler::Jump from;
    unsigned bytecodeIndex;
    void* to;

    CallRecord(MacroAssembler::Jump from, unsigned bytecodeIndex, void* to = 0)
        : from(from)
        , bytecodeIndex(bytecodeIndex)
        , to(to)
    {
    }
};

struct JumpTable {
    MacroAssembler::Jump from;
    unsigned toBytecodeIndex;

    JumpTable(MacroAssembler::Jump f, unsigned t)
        : from(f)
        , toBytecodeIndex(t)
    {
    }
};

struct SlowCaseEntry {
    MacroAssembler::Jump from;
    unsigned to;
    unsigned hint;

    SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
        : from(f)
        , to(t)
        , hint(h)
    {
    }
};

struct SwitchRecord {
    enum Type {
        Immediate,
        Character,
        String
    };

    Type type;

    union {
        SimpleJumpTable* simpleJumpTable;
        StringJumpTable* stringJumpTable;
    } jumpTable;

    unsigned bytecodeIndex;
    unsigned defaultOffset;

    SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeIndex, unsigned defaultOffset, Type type)
        : type(type)
        , bytecodeIndex(bytecodeIndex)
        , defaultOffset(defaultOffset)
    {
        this->jumpTable.simpleJumpTable = jumpTable;
    }

    SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeIndex, unsigned defaultOffset)
        : type(String)
        , bytecodeIndex(bytecodeIndex)
        , defaultOffset(defaultOffset)
    {
        this->jumpTable.stringJumpTable = jumpTable;
    }
};

struct PropertyStubCompilationInfo {
    MacroAssembler::Jump callReturnLocation;
    MacroAssembler::Label hotPathBegin;
};

struct StructureStubCompilationInfo {
    MacroAssembler::DataLabelPtr hotPathBegin;
    MacroAssembler::Jump hotPathOther;
    MacroAssembler::Jump callReturnLocation;
    MacroAssembler::Label coldPathOther;
};

JSValueEncodedAsPointer* ctiTrampoline(
#if PLATFORM(X86_64)
    // FIXME: (bug #22910) this will force all arguments onto the stack (regparm(0) does not appear to have any effect).
    // We can allow register passing here, and move the writes of these values into the trampoline.
    void*, void*, void*, void*, void*, void*,
#endif
    void* code, RegisterFile*, CallFrame*, JSValuePtr* exception, Profiler**, JSGlobalData*);
void ctiVMThrowTrampoline();

void ctiSetReturnAddress(void** where, void* what);
void ctiPatchCallByReturnAddress(void* where, void* what);

class JIT : private MacroAssembler {
    using MacroAssembler::Jump;
    using MacroAssembler::JumpList;
    using MacroAssembler::Label;

#if PLATFORM(X86_64)
    static const RegisterID timeoutCheckRegister = X86::r12;
    static const RegisterID callFrameRegister = X86::r13;
    static const RegisterID tagTypeNumberRegister = X86::r14;
    static const RegisterID tagMaskRegister = X86::r15;
#else
    static const RegisterID timeoutCheckRegister = X86::esi;
    static const RegisterID callFrameRegister = X86::edi;
#endif

    static const int patchGetByIdDefaultStructure = -1;
    // Magic number - the initial offset must not be representable as a signed 8-bit value, or the X86Assembler
    // will compress the displacement and the patched offset may no longer fit; 256 forces the 32-bit (disp32) encoding.
    static const int patchGetByIdDefaultOffset = 256;

#if USE(JIT_STUB_ARGUMENT_REGISTER)
#if PLATFORM(X86_64)
    static const int ctiArgumentInitSize = 6;
#else
    static const int ctiArgumentInitSize = 2;
#endif
#elif USE(JIT_STUB_ARGUMENT_STACK)
    static const int ctiArgumentInitSize = 4;
#else // JIT_STUB_ARGUMENT_VA_LIST
    static const int ctiArgumentInitSize = 0;
#endif

#if PLATFORM(X86_64)
    // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
    static const int patchOffsetPutByIdStructure = 10;
    static const int patchOffsetPutByIdPropertyMapOffset = 31;
    // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
    static const int patchOffsetGetByIdStructure = 10;
    static const int patchOffsetGetByIdBranchToSlowCase = 20;
    static const int patchOffsetGetByIdPropertyMapOffset = 31;
    static const int patchOffsetGetByIdPutResult = 31;
#if ENABLE(OPCODE_SAMPLING)
    static const int patchOffsetGetByIdSlowCaseCall = 53 + ctiArgumentInitSize;
#else
    static const int patchOffsetGetByIdSlowCaseCall = 30 + ctiArgumentInitSize;
#endif
    static const int patchOffsetOpCallCompareToJump = 9;
#else
    // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
    static const int patchOffsetPutByIdStructure = 7;
    static const int patchOffsetPutByIdPropertyMapOffset = 22;
    // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
    static const int patchOffsetGetByIdStructure = 7;
    static const int patchOffsetGetByIdBranchToSlowCase = 13;
    static const int patchOffsetGetByIdPropertyMapOffset = 22;
    static const int patchOffsetGetByIdPutResult = 22;
#if ENABLE(OPCODE_SAMPLING)
    static const int patchOffsetGetByIdSlowCaseCall = 31 + ctiArgumentInitSize;
#else
    static const int patchOffsetGetByIdSlowCaseCall = 21 + ctiArgumentInitSize;
#endif
    static const int patchOffsetOpCallCompareToJump = 6;
#endif
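
    // Sketch (an assumption, not from the original header): each patchOffset*
    // constant is the byte distance from a known anchor in the generated code
    // (a property access's hotPathBegin label, or the return address of its
    // slow-case call) to the operand that repatching rewrites. Hypothetical use:
    //
    //     void* structureCheck = reinterpret_cast<char*>(hotPathBegin) + patchOffsetGetByIdStructure;
    //     // overwrite the inline Structure* immediate at structureCheck with the newly cached Structure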

public:
    static void compile(JSGlobalData* globalData, CodeBlock* codeBlock)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompile();
    }

    static void compileGetByIdSelf(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, void* returnAddress)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompileGetByIdSelf(stubInfo, structure, cachedOffset, returnAddress);
    }

    static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, void* returnAddress)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, cachedOffset, returnAddress, callFrame);
    }

#if USE(CTI_REPATCH_PIC)
    static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, cachedOffset);
    }

    static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, cachedOffset, callFrame);
    }

    static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, cachedOffset, callFrame);
    }
#endif

    static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, void* returnAddress)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, cachedOffset, returnAddress, callFrame);
    }

    static void compilePutByIdReplace(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, void* returnAddress)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompilePutByIdReplace(stubInfo, structure, cachedOffset, returnAddress);
    }

    static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, void* returnAddress)
    {
        JIT jit(globalData, codeBlock);
        jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress);
    }

    static void compileCTIMachineTrampolines(JSGlobalData* globalData)
    {
        JIT jit(globalData);
        jit.privateCompileCTIMachineTrampolines();
    }

    static void patchGetByIdSelf(StructureStubInfo*, Structure*, size_t cachedOffset, void* returnAddress);
    static void patchPutByIdReplace(StructureStubInfo*, Structure*, size_t cachedOffset, void* returnAddress);

    static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, void* returnAddress)
    {
        JIT jit(globalData, codeBlock);
        return jit.privateCompilePatchGetArrayLength(returnAddress);
    }

    static void linkCall(JSFunction* callee, CodeBlock* calleeCodeBlock, void* ctiCode, CallLinkInfo* callLinkInfo, int callerArgCount);
    static void unlinkCall(CallLinkInfo*);

    inline static JSValuePtr execute(void* code, RegisterFile* registerFile, CallFrame* callFrame, JSGlobalData* globalData, JSValuePtr* exception)
    {
        return JSValuePtr::decode(ctiTrampoline(
#if PLATFORM(X86_64)
            0, 0, 0, 0, 0, 0,
#endif
            code, registerFile, callFrame, exception, Profiler::enabledProfilerReference(), globalData));
    }
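
    // Sketch (an assumption, not part of the original header) of how a caller
    // drives these entry points: compile a CodeBlock once, then enter the
    // generated code through execute(), which funnels into ctiTrampoline.
    // 'jitCode' stands for wherever the caller keeps the compiled code pointer:
    //
    //     JIT::compile(globalData, codeBlock);
    //     JSValuePtr exceptionValue = noValue(); // 'noValue()' used here as a stand-in for an empty JSValuePtr
    //     JSValuePtr result = JIT::execute(jitCode, &registerFile, callFrame, globalData, &exceptionValue);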

private:
    JIT(JSGlobalData*, CodeBlock* = 0);

    void privateCompileMainPass();
    void privateCompileLinkPass();
    void privateCompileSlowCases();
    void privateCompile();
    void privateCompileGetByIdSelf(StructureStubInfo*, Structure*, size_t cachedOffset, void* returnAddress);
    void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, size_t cachedOffset, void* returnAddress, CallFrame* callFrame);
#if USE(CTI_REPATCH_PIC)
    void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, size_t cachedOffset);
    void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame);
    void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame);
#endif
    void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, size_t cachedOffset, void* returnAddress, CallFrame* callFrame);
    void privateCompilePutByIdReplace(StructureStubInfo*, Structure*, size_t cachedOffset, void* returnAddress);
    void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, void* returnAddress);

    void privateCompileCTIMachineTrampolines();
    void privateCompilePatchGetArrayLength(void* returnAddress);

    void addSlowCase(Jump);
    void addJump(Jump, int);
    void emitJumpSlowToHot(Jump, int);

    void compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier* ident, unsigned propertyAccessInstructionIndex);
    void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, unsigned propertyAccessInstructionIndex);
    void compilePutByIdHotPath(int baseVReg, Identifier* ident, int valueVReg, unsigned propertyAccessInstructionIndex);
    void compilePutByIdSlowCase(int baseVReg, Identifier* ident, int valueVReg, Vector<SlowCaseEntry>::iterator& iter, unsigned propertyAccessInstructionIndex);
    void compileOpCall(OpcodeID, Instruction* instruction, unsigned callLinkInfoIndex);
    void compileOpCallInitializeCallFrame();
    void compileOpCallSetupArgs(Instruction*);
    void compileOpCallEvalSetupArgs(Instruction*);
    void compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID);
    void compileOpConstructSetupArgs(Instruction*);
    enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
    void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
    void putDoubleResultToJSNumberCellOrJSImmediate(X86Assembler::XMMRegisterID xmmSource, RegisterID jsNumberCell, unsigned dst, X86Assembler::JmpSrc* wroteJSNumberCell, X86Assembler::XMMRegisterID tempXmm, RegisterID tempReg1, RegisterID tempReg2);

    void compileFastArith_op_add(Instruction*);
    void compileFastArith_op_sub(Instruction*);
    void compileFastArith_op_mul(Instruction*);
    void compileFastArith_op_mod(unsigned result, unsigned op1, unsigned op2);
    void compileFastArith_op_bitand(unsigned result, unsigned op1, unsigned op2);
    void compileFastArith_op_lshift(unsigned result, unsigned op1, unsigned op2);
    void compileFastArith_op_rshift(unsigned result, unsigned op1, unsigned op2);
    void compileFastArith_op_pre_inc(unsigned srcDst);
    void compileFastArith_op_pre_dec(unsigned srcDst);
    void compileFastArith_op_post_inc(unsigned result, unsigned srcDst);
    void compileFastArith_op_post_dec(unsigned result, unsigned srcDst);
    void compileFastArithSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_mod(unsigned result, unsigned op1, unsigned op2, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_bitand(unsigned result, unsigned op1, unsigned op2, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_lshift(unsigned result, unsigned op1, unsigned op2, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_rshift(unsigned result, unsigned op1, unsigned op2, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_pre_inc(unsigned srcDst, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_pre_dec(unsigned srcDst, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_post_inc(unsigned result, unsigned srcDst, Vector<SlowCaseEntry>::iterator&);
    void compileFastArithSlow_op_post_dec(unsigned result, unsigned srcDst, Vector<SlowCaseEntry>::iterator&);
#if ENABLE(JIT_OPTIMIZE_ARITHMETIC)
    void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
    void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
#endif

    void emitGetVirtualRegister(int src, RegisterID dst);
    void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
    void emitPutVirtualRegister(unsigned dst, RegisterID from = X86::eax);

    void emitPutJITStubArg(RegisterID src, unsigned argumentNumber);
    void emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch);
    void emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber);
    void emitPutJITStubArgConstant(void* value, unsigned argumentNumber);
    void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);

    void emitInitRegister(unsigned dst);

    void emitPutCTIParam(void* value, unsigned name);
    void emitPutCTIParam(RegisterID from, unsigned name);
    void emitGetCTIParam(unsigned name, RegisterID to);

    void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
    void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
    void emitGetFromCallFrameHeader(RegisterFile::CallFrameHeaderEntry entry, RegisterID to);

    JSValuePtr getConstantOperand(unsigned src);
    int32_t getConstantOperandImmediateInt(unsigned src);
    bool isOperandConstantImmediateInt(unsigned src);

    Jump emitJumpIfJSCell(RegisterID);
    Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
    void emitJumpSlowCaseIfJSCell(RegisterID);
    Jump emitJumpIfNotJSCell(RegisterID);
    void emitJumpSlowCaseIfNotJSCell(RegisterID);
    void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
#if USE(ALTERNATE_JSIMMEDIATE)
    JIT::Jump emitJumpIfImmediateNumber(RegisterID);
    JIT::Jump emitJumpIfNotImmediateNumber(RegisterID);
#endif

    Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
    {
        return iter++->from;
    }
    void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
    {
        iter->from.link(this);
        ++iter;
    }
    void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int vReg);

    JIT::Jump emitJumpIfImmediateInteger(RegisterID);
    JIT::Jump emitJumpIfNotImmediateInteger(RegisterID);
    JIT::Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
    void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
    void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);

    Jump checkStructure(RegisterID reg, Structure* structure);

#if !USE(ALTERNATE_JSIMMEDIATE)
    void emitFastArithDeTagImmediate(RegisterID);
    Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
#endif
    void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
    void emitFastArithImmToInt(RegisterID);
    void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);

    void emitTagAsBoolImmediate(RegisterID reg);

    void restoreArgumentReference();
    void restoreArgumentReferenceForTrampoline();

    Jump emitNakedCall(RegisterID);
    Jump emitNakedCall(void* function);
    Jump emitCTICall_internal(void*);
    Jump emitCTICall(CTIHelper_j helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
    Jump emitCTICall(CTIHelper_o helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
    Jump emitCTICall(CTIHelper_p helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
    Jump emitCTICall(CTIHelper_v helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
    Jump emitCTICall(CTIHelper_s helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
    Jump emitCTICall(CTIHelper_b helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
    Jump emitCTICall(CTIHelper_2 helper) { return emitCTICall_internal(reinterpret_cast<void*>(helper)); }
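
    // Sketch (an assumption): the overload set above lets a call site pass a
    // typed helper directly and have overload resolution pick the matching
    // CTIHelper_* signature; every overload funnels into emitCTICall_internal
    // with a raw code pointer. A hypothetical emission site:
    //
    //     Jump call = emitCTICall(cti_op_example); // any helper matching one of the CTIHelper_* typedefs
    //     m_calls.append(CallRecord(call, m_bytecodeIndex));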

    void emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst);
    void emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index);

    void emitSlowScriptCheck();

    void printBytecodeOperandTypes(unsigned src1, unsigned src2);

    void killLastResultRegister();

#if ENABLE(CODEBLOCK_SAMPLING)
    void sampleCodeBlock(CodeBlock* codeBlock)
    {
#if PLATFORM(X86_64)
        move(ImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86::ecx);
        storePtr(ImmPtr(codeBlock), X86::ecx);
#else
        storePtr(ImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
#endif
    }
#else
    void sampleCodeBlock(CodeBlock*) {}
#endif

#if ENABLE(OPCODE_SAMPLING)
    void sampleInstruction(Instruction* instruction, bool inHostFunction = false)
    {
#if PLATFORM(X86_64)
        move(ImmPtr(m_interpreter->sampler()->sampleSlot()), X86::ecx);
        storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86::ecx);
#else
        storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
#endif
    }
#else
    void sampleInstruction(Instruction*, bool) {}
#endif

    Interpreter* m_interpreter;
    JSGlobalData* m_globalData;
    CodeBlock* m_codeBlock;

    Vector<CallRecord> m_calls;
    Vector<Label> m_labels;
    Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
    Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
    Vector<JumpTable> m_jmpTable;

    struct JSRInfo {
        DataLabelPtr storeLocation;
        Label target;

        JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
            : storeLocation(storeLocation)
            , target(targetLocation)
        {
        }
    };

    unsigned m_bytecodeIndex;
    Vector<JSRInfo> m_jsrSites;
    Vector<SlowCaseEntry> m_slowCases;
    Vector<SwitchRecord> m_switches;

    int m_lastResultBytecodeRegister;
    unsigned m_jumpTargetsPosition;
};

#endif // ENABLE(JIT)

#endif // JIT_h