/*
 * Copyright (C) 2008, 2012, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JIT_h
#define JIT_h

#if ENABLE(JIT)
// Verbose logging of code generation
#define ENABLE_JIT_VERBOSE 0
// Verbose logging for OSR-related code.
#define ENABLE_JIT_VERBOSE_OSR 0
// We've run into some problems where changing the size of the class JIT leads to
// performance fluctuations. Try forcing alignment in an attempt to stabilize this.
#if COMPILER(GCC)
#define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
#else
#define JIT_CLASS_ALIGNMENT
#endif
#define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
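
// Illustrative use of the assertion above (the label names here are hypothetical,
// not part of this header): after emitting a patchable sequence, verify that the
// offset the repatching code will assume matches what the assembler produced, e.g.
//
//     ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, displacementLabel), patchOffsetGetByIdPropertyMapOffset);
//
// so that drift between code generation and the patching constants fails loudly in debug builds.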
#include "CodeBlock.h"
#include "CompactJITCodeMap.h"
#include "Interpreter.h"
#include "JITDisassembler.h"
#include "JSInterfaceJIT.h"
#include "LegacyProfiler.h"
#include "ResultType.h"
#include "UnusedPointer.h"
#include <bytecode/SamplingTool.h>

namespace JSC {
class FunctionExecutable;
class JSPropertyNameIterator;
class MarkedAllocator;
struct PolymorphicAccessStructureList;
struct SimpleJumpTable;
struct StringJumpTable;
struct StructureStubInfo;
struct CallRecord {
    MacroAssembler::Call from;
    unsigned bytecodeOffset;
    void* to;

    CallRecord()
    {
    }

    CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
        : from(from)
        , bytecodeOffset(bytecodeOffset)
        , to(to)
    {
    }
};
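
// CallRecords are appended to JIT::m_calls during code generation and resolved at
// link time. A sketch of that linking loop (illustrative; the real loop lives in
// JIT::privateCompile):
//
//     for (unsigned i = 0; i < m_calls.size(); ++i) {
//         if (m_calls[i].to)
//             patchBuffer.link(m_calls[i].from, FunctionPtr(m_calls[i].to));
//     }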

struct JumpTable {
    MacroAssembler::Jump from;
    unsigned toBytecodeOffset;

    JumpTable(MacroAssembler::Jump f, unsigned t)
        : from(f)
        , toBytecodeOffset(t)
    {
    }
};

struct SlowCaseEntry {
    MacroAssembler::Jump from;
    unsigned to;
    unsigned hint;

    SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
        : from(f)
        , to(t)
        , hint(h)
    {
    }
};

struct SwitchRecord {
    enum Type {
        Immediate,
        Character,
        String
    };

    Type type;

    union {
        SimpleJumpTable* simpleJumpTable;
        StringJumpTable* stringJumpTable;
    } jumpTable;

    unsigned bytecodeOffset;
    unsigned defaultOffset;

    SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
        : type(type)
        , bytecodeOffset(bytecodeOffset)
        , defaultOffset(defaultOffset)
    {
        this->jumpTable.simpleJumpTable = jumpTable;
    }

    SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
        : type(String)
        , bytecodeOffset(bytecodeOffset)
        , defaultOffset(defaultOffset)
    {
        this->jumpTable.stringJumpTable = jumpTable;
    }
};
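
// SwitchRecord is a small tagged union: `type` records which member of `jumpTable`
// is live. A record for an immediate switch would be built roughly like this
// (illustrative):
//
//     m_switches.append(SwitchRecord(&codeBlock->immediateSwitchJumpTable(tableIndex), m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));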

enum PropertyStubGetById_T { PropertyStubGetById };
enum PropertyStubPutById_T { PropertyStubPutById };

struct PropertyStubCompilationInfo {
    enum Type { GetById, PutById } m_type;

    unsigned bytecodeIndex;
    MacroAssembler::Call callReturnLocation;
    MacroAssembler::Label hotPathBegin;
    MacroAssembler::DataLabelPtr getStructureToCompare;
    MacroAssembler::PatchableJump getStructureCheck;
    MacroAssembler::ConvertibleLoadLabel propertyStorageLoad;
#if USE(JSVALUE64)
    MacroAssembler::DataLabelCompact getDisplacementLabel;
#else
    MacroAssembler::DataLabelCompact getDisplacementLabel1;
    MacroAssembler::DataLabelCompact getDisplacementLabel2;
#endif
    MacroAssembler::Label getPutResult;
    MacroAssembler::Label getColdPathBegin;
    MacroAssembler::DataLabelPtr putStructureToCompare;
#if USE(JSVALUE64)
    MacroAssembler::DataLabel32 putDisplacementLabel;
#else
    MacroAssembler::DataLabel32 putDisplacementLabel1;
    MacroAssembler::DataLabel32 putDisplacementLabel2;
#endif

#if !ASSERT_DISABLED
    PropertyStubCompilationInfo()
        : bytecodeIndex(std::numeric_limits<unsigned>::max())
    {
    }
#endif

    PropertyStubCompilationInfo(
        PropertyStubGetById_T, unsigned bytecodeIndex, MacroAssembler::Label hotPathBegin,
        MacroAssembler::DataLabelPtr structureToCompare,
        MacroAssembler::PatchableJump structureCheck,
        MacroAssembler::ConvertibleLoadLabel propertyStorageLoad,
#if USE(JSVALUE64)
        MacroAssembler::DataLabelCompact displacementLabel,
#else
        MacroAssembler::DataLabelCompact displacementLabel1,
        MacroAssembler::DataLabelCompact displacementLabel2,
#endif
        MacroAssembler::Label putResult)
        : m_type(GetById)
        , bytecodeIndex(bytecodeIndex)
        , hotPathBegin(hotPathBegin)
        , getStructureToCompare(structureToCompare)
        , getStructureCheck(structureCheck)
        , propertyStorageLoad(propertyStorageLoad)
#if USE(JSVALUE64)
        , getDisplacementLabel(displacementLabel)
#else
        , getDisplacementLabel1(displacementLabel1)
        , getDisplacementLabel2(displacementLabel2)
#endif
        , getPutResult(putResult)
    {
    }

    PropertyStubCompilationInfo(
        PropertyStubPutById_T, unsigned bytecodeIndex, MacroAssembler::Label hotPathBegin,
        MacroAssembler::DataLabelPtr structureToCompare,
        MacroAssembler::ConvertibleLoadLabel propertyStorageLoad,
#if USE(JSVALUE64)
        MacroAssembler::DataLabel32 displacementLabel)
#else
        MacroAssembler::DataLabel32 displacementLabel1,
        MacroAssembler::DataLabel32 displacementLabel2)
#endif
        : m_type(PutById)
        , bytecodeIndex(bytecodeIndex)
        , hotPathBegin(hotPathBegin)
        , propertyStorageLoad(propertyStorageLoad)
        , putStructureToCompare(structureToCompare)
#if USE(JSVALUE64)
        , putDisplacementLabel(displacementLabel)
#else
        , putDisplacementLabel1(displacementLabel1)
        , putDisplacementLabel2(displacementLabel2)
#endif
    {
    }

    void slowCaseInfo(PropertyStubGetById_T, MacroAssembler::Label coldPathBegin, MacroAssembler::Call call)
    {
        ASSERT(m_type == GetById);
        callReturnLocation = call;
        getColdPathBegin = coldPathBegin;
    }

    void slowCaseInfo(PropertyStubPutById_T, MacroAssembler::Call call)
    {
        ASSERT(m_type == PutById);
        callReturnLocation = call;
    }

    void copyToStubInfo(StructureStubInfo& info, LinkBuffer& patchBuffer);
};
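
// A PropertyStubCompilationInfo is filled in three steps: the hot-path emitter
// constructs it with the labels it just planted, the slow-path emitter adds its
// labels through slowCaseInfo(), and at link time copyToStubInfo() resolves every
// label against the LinkBuffer into the CodeBlock's StructureStubInfo. Sketch
// (illustrative):
//
//     PropertyStubCompilationInfo& info = m_propertyAccessCompilationInfo[i];
//     info.slowCaseInfo(PropertyStubGetById, coldPathBegin, call);
//     ...
//     info.copyToStubInfo(codeBlock->structureStubInfo(i), patchBuffer);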

struct ByValCompilationInfo {
    ByValCompilationInfo() { }

    ByValCompilationInfo(unsigned bytecodeIndex, MacroAssembler::PatchableJump badTypeJump, JITArrayMode arrayMode, MacroAssembler::Label doneTarget)
        : bytecodeIndex(bytecodeIndex)
        , badTypeJump(badTypeJump)
        , arrayMode(arrayMode)
        , doneTarget(doneTarget)
    {
    }

    unsigned bytecodeIndex;
    MacroAssembler::PatchableJump badTypeJump;
    JITArrayMode arrayMode;
    MacroAssembler::Label doneTarget;
    MacroAssembler::Label slowPathTarget;
    MacroAssembler::Call returnAddress;
};

struct StructureStubCompilationInfo {
    MacroAssembler::DataLabelPtr hotPathBegin;
    MacroAssembler::Call hotPathOther;
    MacroAssembler::Call callReturnLocation;
    CallLinkInfo::CallType callType;
    unsigned bytecodeIndex;
};

// Near calls can only be patched to other JIT code, regular calls can be patched to JIT code or relinked to stub functions.
void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);

class JIT : private JSInterfaceJIT {
    friend class JITStubCall;
    friend struct PropertyStubCompilationInfo;

    using MacroAssembler::Jump;
    using MacroAssembler::JumpList;
    using MacroAssembler::Label;

    static const uintptr_t patchGetByIdDefaultStructure = unusedPointer;
    static const int patchGetByIdDefaultOffset = 0;
    // Magic number - initial offset cannot be representable as a signed 8bit value, or the X86Assembler
    // will compress the displacement, and we may not be able to fit a patched offset.
    static const int patchPutByIdDefaultOffset = 256;
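    // For example, x86 encodes displacements in [-128, 127] in a single byte: if the
    // initial store were emitted against a small offset, the assembler would pick the
    // disp8 form, and a later repatch to an offset of 128 or more could not fit.
    // Starting at 256 forces the 32-bit displacement encoding up front.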

public:
    static JITCode compile(VM* vm, CodeBlock* codeBlock, JITCompilationEffort effort, CodePtr* functionEntryArityCheck = 0)
    {
        return JIT(vm, codeBlock).privateCompile(functionEntryArityCheck, effort);
    }
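
    // Typical call site (illustrative; the real callers are the executable/CodeBlock
    // compilation paths):
    //
    //     JITCode jitCode = JIT::compile(vm, codeBlock, JITCompilationCanFail);
    //
    // A JIT instance only lives for the duration of one compilation; all the state
    // declared below is per-compile.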

    static void compileClosureCall(VM* vm, CallLinkInfo* callLinkInfo, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
    {
        JIT jit(vm, callerCodeBlock);
        jit.m_bytecodeOffset = callLinkInfo->codeOrigin.bytecodeIndex;
        jit.privateCompileClosureCall(callLinkInfo, calleeCodeBlock, expectedStructure, expectedExecutable, codePtr);
    }

    static void compileGetByIdProto(VM* vm, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
    {
        JIT jit(vm, codeBlock);
        jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
        jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
    }

    static void compileGetByIdSelfList(VM* vm, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
    {
        JIT jit(vm, codeBlock);
        jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
        jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
    }

    static void compileGetByIdProtoList(VM* vm, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
    {
        JIT jit(vm, codeBlock);
        jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
        jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
    }

    static void compileGetByIdChainList(VM* vm, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
    {
        JIT jit(vm, codeBlock);
        jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
        jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
    }

    static void compileGetByIdChain(VM* vm, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
    {
        JIT jit(vm, codeBlock);
        jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
        jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
    }

    static void compilePutByIdTransition(VM* vm, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, PropertyOffset cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
    {
        JIT jit(vm, codeBlock);
        jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
        jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
    }

    static void compileGetByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
    {
        JIT jit(vm, codeBlock);
        jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
        jit.privateCompileGetByVal(byValInfo, returnAddress, arrayMode);
    }

    static void compilePutByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
    {
        JIT jit(vm, codeBlock);
        jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
        jit.privateCompilePutByVal(byValInfo, returnAddress, arrayMode);
    }

    static CodeRef compileCTINativeCall(VM* vm, NativeFunction func)
    {
        if (!vm->canUseJIT()) {
#if ENABLE(LLINT)
            return CodeRef::createLLIntCodeRef(llint_native_call_trampoline);
#else
            return CodeRef();
#endif
        }
        JIT jit(vm, 0);
        return jit.privateCompileCTINativeCall(vm, func);
    }

    static void resetPatchGetById(RepatchBuffer&, StructureStubInfo*);
    static void resetPatchPutById(RepatchBuffer&, StructureStubInfo*);
    static void patchGetByIdSelf(CodeBlock*, StructureStubInfo*, Structure*, PropertyOffset cachedOffset, ReturnAddressPtr);
    static void patchPutByIdReplace(CodeBlock*, StructureStubInfo*, Structure*, PropertyOffset cachedOffset, ReturnAddressPtr, bool direct);

    static void compilePatchGetArrayLength(VM* vm, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
    {
        JIT jit(vm, codeBlock);
#if ENABLE(DFG_JIT)
        // Force profiling to be enabled during stub generation.
        jit.m_canBeOptimized = true;
        jit.m_canBeOptimizedOrInlined = true;
        jit.m_shouldEmitProfiling = true;
#endif // ENABLE(DFG_JIT)
        return jit.privateCompilePatchGetArrayLength(returnAddress);
    }

    static void linkFor(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, VM*, CodeSpecializationKind);
    static void linkSlowCall(CodeBlock* callerCodeBlock, CallLinkInfo*);

private:
    JIT(VM*, CodeBlock* = 0);

    void privateCompileMainPass();
    void privateCompileLinkPass();
    void privateCompileSlowCases();
    JITCode privateCompile(CodePtr* functionEntryArityCheck, JITCompilationEffort);

    void privateCompileClosureCall(CallLinkInfo*, CodeBlock* calleeCodeBlock, Structure*, ExecutableBase*, MacroAssemblerCodePtr);

    void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset, ReturnAddressPtr, CallFrame*);
    void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset);
    void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset, CallFrame*);
    void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset, CallFrame*);
    void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset, ReturnAddressPtr, CallFrame*);
    void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, PropertyOffset cachedOffset, StructureChain*, ReturnAddressPtr, bool direct);

    void privateCompileGetByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);
    void privateCompilePutByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);

    Label privateCompileCTINativeCall(VM*, bool isConstruct = false);
    CodeRef privateCompileCTINativeCall(VM*, NativeFunction);
    void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);

    static bool isDirectPutById(StructureStubInfo*);

    void addSlowCase(Jump);
    void addSlowCase(JumpList);
    void addSlowCase();
    void addJump(Jump, int);
    void emitJumpSlowToHot(Jump, int);

    void compileOpCall(OpcodeID, Instruction*, unsigned callLinkInfoIndex);
    void compileOpCallSlowCase(OpcodeID, Instruction*, Vector<SlowCaseEntry>::iterator&, unsigned callLinkInfoIndex);
    void compileLoadVarargs(Instruction*);
    void compileCallEval();
    void compileCallEvalSlowCase(Vector<SlowCaseEntry>::iterator&);

    enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
    void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
    bool isOperandConstantImmediateDouble(unsigned src);

    void emitLoadDouble(int index, FPRegisterID value);
    void emitLoadInt32ToDouble(int index, FPRegisterID value);
    Jump emitJumpIfNotObject(RegisterID structureReg);

    Jump addStructureTransitionCheck(JSCell*, Structure*, StructureStubInfo*, RegisterID scratch);
    void addStructureTransitionCheck(JSCell*, Structure*, StructureStubInfo*, JumpList& failureCases, RegisterID scratch);
    void testPrototype(JSValue, JumpList& failureCases, StructureStubInfo*);

    enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterImmediates };
    // The value register in the write barrier is used before any scratch registers,
    // so it may safely be the same as either of the scratch registers.
    void emitWriteBarrier(RegisterID owner, RegisterID valueTag, RegisterID scratch, RegisterID scratch2, WriteBarrierMode, WriteBarrierUseKind);
    void emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch, WriteBarrierMode, WriteBarrierUseKind);
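
    // Because the value register is consumed before either scratch register is
    // written, a call like this (illustrative register and use-kind choices, not
    // taken from a real call site) is legal:
    //
    //     emitWriteBarrier(regT0, regT1, regT1, regT3, ShouldFilterImmediates, WriteBarrierForPropertyAccess);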

    template<typename StructureType> // StructureType can be RegisterID or ImmPtr.
    void emitAllocateJSObject(RegisterID allocator, StructureType, RegisterID result, RegisterID scratch);
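
    // The template parameter lets the structure be either a register or a
    // compile-time constant, e.g. (illustrative names):
    //
    //     emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    //     emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);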

#if ENABLE(VALUE_PROFILER)
    // This assumes that the value to profile is in regT0 and that regT3 is available for
    // scratch.
    void emitValueProfilingSite(ValueProfile*);
    void emitValueProfilingSite(unsigned bytecodeOffset);
    void emitValueProfilingSite();
#else
    void emitValueProfilingSite(unsigned) { }
    void emitValueProfilingSite() { }
#endif

    void emitArrayProfilingSite(RegisterID structureAndIndexingType, RegisterID scratch, ArrayProfile*);
    void emitArrayProfilingSiteForBytecodeIndex(RegisterID structureAndIndexingType, RegisterID scratch, unsigned bytecodeIndex);
    void emitArrayProfileStoreToHoleSpecialCase(ArrayProfile*);
    void emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile*);

    JITArrayMode chooseArrayMode(ArrayProfile*);

    // Property is in regT1, base is in regT0. regT2 contains indexing type.
    // Property is int-checked and zero extended. Base is cell checked.
    // Structure is already profiled. Returns the slow cases. Fall-through
    // case contains result in regT0, and it is not yet profiled.
    JumpList emitInt32GetByVal(Instruction* instruction, PatchableJump& badType) { return emitContiguousGetByVal(instruction, badType, Int32Shape); }
    JumpList emitDoubleGetByVal(Instruction*, PatchableJump& badType);
    JumpList emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape = ContiguousShape);
    JumpList emitArrayStorageGetByVal(Instruction*, PatchableJump& badType);
    JumpList emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, const TypedArrayDescriptor&, size_t elementSize, TypedArraySignedness);
    JumpList emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, const TypedArrayDescriptor&, size_t elementSize);

    // Property is in regT1, base is in regT0. regT2 contains indexing type.
    // The value to store is not yet loaded. Property is int-checked and
    // zero-extended. Base is cell checked. Structure is already profiled.
    // Returns the slow cases.
    JumpList emitInt32PutByVal(Instruction* currentInstruction, PatchableJump& badType)
    {
        return emitGenericContiguousPutByVal(currentInstruction, badType, Int32Shape);
    }
    JumpList emitDoublePutByVal(Instruction* currentInstruction, PatchableJump& badType)
    {
        return emitGenericContiguousPutByVal(currentInstruction, badType, DoubleShape);
    }
    JumpList emitContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType)
    {
        return emitGenericContiguousPutByVal(currentInstruction, badType);
    }
    JumpList emitGenericContiguousPutByVal(Instruction*, PatchableJump& badType, IndexingType indexingShape = ContiguousShape);
    JumpList emitArrayStoragePutByVal(Instruction*, PatchableJump& badType);
    JumpList emitIntTypedArrayPutByVal(Instruction*, PatchableJump& badType, const TypedArrayDescriptor&, size_t elementSize, TypedArraySignedness, TypedArrayRounding);
    JumpList emitFloatTypedArrayPutByVal(Instruction*, PatchableJump& badType, const TypedArrayDescriptor&, size_t elementSize);

    enum FinalObjectMode { MayBeFinal, KnownNotFinal };

#if USE(JSVALUE32_64)
    bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);

    void emitLoadTag(int index, RegisterID tag);
    void emitLoadPayload(int index, RegisterID payload);

    void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
    void emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
    void emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2);

    void emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
    void emitStore(int index, const JSValue constant, RegisterID base = callFrameRegister);
    void emitStoreInt32(int index, RegisterID payload, bool indexIsInt32 = false);
    void emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32 = false);
    void emitStoreAndMapInt32(int index, RegisterID tag, RegisterID payload, bool indexIsInt32, size_t opcodeLength);
    void emitStoreCell(int index, RegisterID payload, bool indexIsCell = false);
    void emitStoreBool(int index, RegisterID payload, bool indexIsBool = false);
    void emitStoreDouble(int index, FPRegisterID value);

    bool isLabeled(unsigned bytecodeOffset);
    void map(unsigned bytecodeOffset, int virtualRegisterIndex, RegisterID tag, RegisterID payload);
    void unmap(RegisterID);
    void unmap();
    bool isMapped(int virtualRegisterIndex);
    bool getMappedPayload(int virtualRegisterIndex, RegisterID& payload);
    bool getMappedTag(int virtualRegisterIndex, RegisterID& tag);

    void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex);
    void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag);

    void compileGetByIdHotPath(Identifier*);
    void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier*, Vector<SlowCaseEntry>::iterator&);
    void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset);
    void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset);
    void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode = MayBeFinal);
    void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset);

    // Arithmetic opcode helpers
    void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
    void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
    void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);

#if CPU(ARM_TRADITIONAL)
    // sequenceOpCall
    static const int sequenceOpCallInstructionSpace = 12;
    static const int sequenceOpCallConstantSpace = 2;
    // sequenceGetByIdHotPath
    static const int sequenceGetByIdHotPathInstructionSpace = 36;
    static const int sequenceGetByIdHotPathConstantSpace = 4;
    // sequenceGetByIdSlowCase
    static const int sequenceGetByIdSlowCaseInstructionSpace = 80;
    static const int sequenceGetByIdSlowCaseConstantSpace = 4;
    // sequencePutById
    static const int sequencePutByIdInstructionSpace = 36;
    static const int sequencePutByIdConstantSpace = 4;
#elif CPU(SH4)
    // sequenceOpCall
    static const int sequenceOpCallInstructionSpace = 12;
    static const int sequenceOpCallConstantSpace = 2;
    // sequenceGetByIdHotPath
    static const int sequenceGetByIdHotPathInstructionSpace = 36;
    static const int sequenceGetByIdHotPathConstantSpace = 5;
    // sequenceGetByIdSlowCase
    static const int sequenceGetByIdSlowCaseInstructionSpace = 38;
    static const int sequenceGetByIdSlowCaseConstantSpace = 4;
    // sequencePutById
    static const int sequencePutByIdInstructionSpace = 36;
    static const int sequencePutByIdConstantSpace = 5;
#endif

#else // USE(JSVALUE32_64)
    /* This function is deprecated. */
    void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);

    void emitGetVirtualRegister(int src, RegisterID dst);
    void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
    void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);
    void emitStoreCell(unsigned dst, RegisterID payload, bool /* only used in JSValue32_64 */ = false)
    {
        emitPutVirtualRegister(dst, payload);
    }

    int32_t getConstantOperandImmediateInt(unsigned src);

    void killLastResultRegister();

    Jump emitJumpIfJSCell(RegisterID);
    Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
    void emitJumpSlowCaseIfJSCell(RegisterID);
    void emitJumpSlowCaseIfNotJSCell(RegisterID);
    void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
    Jump emitJumpIfImmediateInteger(RegisterID);
    Jump emitJumpIfNotImmediateInteger(RegisterID);
    Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
    void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
    void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
    void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);

    void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);

    void emitTagAsBoolImmediate(RegisterID reg);
    void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
    void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);

    void compileGetByIdHotPath(int baseVReg, Identifier*);
    void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier*, Vector<SlowCaseEntry>::iterator&);
    void compileGetDirectOffset(RegisterID base, RegisterID result, PropertyOffset cachedOffset);
    void compileGetDirectOffset(JSObject* base, RegisterID result, PropertyOffset cachedOffset);
    void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch, FinalObjectMode = MayBeFinal);
    void compilePutDirectOffset(RegisterID base, RegisterID value, PropertyOffset cachedOffset);

#endif // USE(JSVALUE32_64)

#if (defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL)
#define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace); } while (false)
#define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace, dst); } while (false)
#define END_UNINTERRUPTED_SEQUENCE(name) END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, 0)

    void beginUninterruptedSequence(int, int);
    void endUninterruptedSequence(int, int, int);

#else
#define BEGIN_UNINTERRUPTED_SEQUENCE(name)
#define END_UNINTERRUPTED_SEQUENCE(name)
#define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst)
#endif
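
    // Usage sketch: the macros pair a sequence name with the matching
    // *InstructionSpace / *ConstantSpace constants above, so the assembler cannot
    // dump its constant pool in the middle of a patchable sequence:
    //
    //     BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
    //     ... emit the patchable loads ...
    //     END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);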

    void emit_compareAndJump(OpcodeID, unsigned op1, unsigned op2, unsigned target, RelationalCondition);
    void emit_compareAndJumpSlow(unsigned op1, unsigned op2, unsigned target, DoubleCondition, int (JIT_STUB *stub)(STUB_ARGS_DECLARATION), bool invert, Vector<SlowCaseEntry>::iterator&);

    void emit_op_add(Instruction*);
    void emit_op_bitand(Instruction*);
    void emit_op_bitor(Instruction*);
    void emit_op_bitxor(Instruction*);
    void emit_op_call(Instruction*);
    void emit_op_call_eval(Instruction*);
    void emit_op_call_varargs(Instruction*);
    void emit_op_call_put_result(Instruction*);
    void emit_op_catch(Instruction*);
    void emit_op_construct(Instruction*);
    void emit_op_get_callee(Instruction*);
    void emit_op_create_this(Instruction*);
    void emit_op_convert_this(Instruction*);
    void emit_op_create_arguments(Instruction*);
    void emit_op_debug(Instruction*);
    void emit_op_del_by_id(Instruction*);
    void emit_op_div(Instruction*);
    void emit_op_end(Instruction*);
    void emit_op_enter(Instruction*);
    void emit_op_create_activation(Instruction*);
    void emit_op_eq(Instruction*);
    void emit_op_eq_null(Instruction*);
    void emit_op_get_by_id(Instruction*);
    void emit_op_get_arguments_length(Instruction*);
    void emit_op_get_by_val(Instruction*);
    void emit_op_get_argument_by_val(Instruction*);
    void emit_op_get_by_pname(Instruction*);
    void emit_op_init_lazy_reg(Instruction*);
    void emit_op_check_has_instance(Instruction*);
    void emit_op_instanceof(Instruction*);
    void emit_op_is_undefined(Instruction*);
    void emit_op_is_boolean(Instruction*);
    void emit_op_is_number(Instruction*);
    void emit_op_is_string(Instruction*);
    void emit_op_jeq_null(Instruction*);
    void emit_op_jfalse(Instruction*);
    void emit_op_jmp(Instruction*);
    void emit_op_jneq_null(Instruction*);
    void emit_op_jneq_ptr(Instruction*);
    void emit_op_jless(Instruction*);
    void emit_op_jlesseq(Instruction*);
    void emit_op_jgreater(Instruction*);
    void emit_op_jgreatereq(Instruction*);
    void emit_op_jnless(Instruction*);
    void emit_op_jnlesseq(Instruction*);
    void emit_op_jngreater(Instruction*);
    void emit_op_jngreatereq(Instruction*);
    void emit_op_jtrue(Instruction*);
    void emit_op_loop_hint(Instruction*);
    void emit_op_lshift(Instruction*);
    void emit_op_mod(Instruction*);
    void emit_op_mov(Instruction*);
    void emit_op_mul(Instruction*);
    void emit_op_negate(Instruction*);
    void emit_op_neq(Instruction*);
    void emit_op_neq_null(Instruction*);
    void emit_op_new_array(Instruction*);
    void emit_op_new_array_with_size(Instruction*);
    void emit_op_new_array_buffer(Instruction*);
    void emit_op_new_func(Instruction*);
    void emit_op_new_func_exp(Instruction*);
    void emit_op_new_object(Instruction*);
    void emit_op_new_regexp(Instruction*);
    void emit_op_get_pnames(Instruction*);
    void emit_op_next_pname(Instruction*);
    void emit_op_not(Instruction*);
    void emit_op_nstricteq(Instruction*);
    void emit_op_pop_scope(Instruction*);
    void emit_op_dec(Instruction*);
    void emit_op_inc(Instruction*);
    void emit_op_profile_did_call(Instruction*);
    void emit_op_profile_will_call(Instruction*);
    void emit_op_push_name_scope(Instruction*);
    void emit_op_push_with_scope(Instruction*);
    void emit_op_put_by_id(Instruction*);
    void emit_op_put_by_index(Instruction*);
    void emit_op_put_by_val(Instruction*);
    void emit_op_put_getter_setter(Instruction*);
    void emit_op_init_global_const(Instruction*);
    void emit_op_init_global_const_check(Instruction*);
    void emit_resolve_operations(ResolveOperations*, const int* base, const int* value);
    void emitSlow_link_resolve_operations(ResolveOperations*, Vector<SlowCaseEntry>::iterator&);
    void emit_op_resolve(Instruction*);
    void emit_op_resolve_base(Instruction*);
    void emit_op_resolve_with_base(Instruction*);
    void emit_op_resolve_with_this(Instruction*);
    void emit_op_put_to_base(Instruction*);
    void emit_op_ret(Instruction*);
    void emit_op_ret_object_or_this(Instruction*);
    void emit_op_rshift(Instruction*);
    void emit_op_strcat(Instruction*);
    void emit_op_stricteq(Instruction*);
    void emit_op_sub(Instruction*);
    void emit_op_switch_char(Instruction*);
    void emit_op_switch_imm(Instruction*);
    void emit_op_switch_string(Instruction*);
    void emit_op_tear_off_activation(Instruction*);
    void emit_op_tear_off_arguments(Instruction*);
    void emit_op_throw(Instruction*);
    void emit_op_throw_static_error(Instruction*);
    void emit_op_to_number(Instruction*);
    void emit_op_to_primitive(Instruction*);
    void emit_op_unexpected_load(Instruction*);
    void emit_op_urshift(Instruction*);
    void emit_op_get_scoped_var(Instruction*);
    void emit_op_put_scoped_var(Instruction*);

    void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_create_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_check_has_instance(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jgreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jgreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jngreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jngreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_new_object(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_init_global_const_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_to_number(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);

    void emitSlow_op_resolve(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_resolve_base(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_resolve_with_base(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_resolve_with_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
    void emitSlow_op_put_to_base(Instruction*, Vector<SlowCaseEntry>::iterator&);

    void emitRightShift(Instruction*, bool isUnsigned);
    void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);

    void emitInitRegister(unsigned dst);

    void emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry);
    void emitGetFromCallFrameHeaderPtr(JSStack::CallFrameHeaderEntry, RegisterID to, RegisterID from = callFrameRegister);
    void emitGetFromCallFrameHeader32(JSStack::CallFrameHeaderEntry, RegisterID to, RegisterID from = callFrameRegister);
#if USE(JSVALUE64)
    void emitGetFromCallFrameHeader64(JSStack::CallFrameHeaderEntry, RegisterID to, RegisterID from = callFrameRegister);
#endif

    JSValue getConstantOperand(unsigned src);
    bool isOperandConstantImmediateInt(unsigned src);
    bool isOperandConstantImmediateChar(unsigned src);

    Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
    {
        return iter++->from;
    }
    void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
    {
        iter->from.link(this);
        ++iter;
    }
    void linkDummySlowCase(Vector<SlowCaseEntry>::iterator& iter)
    {
        ASSERT(!iter->from.isSet());
        ++iter;
    }
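
    // The emitSlow_op_* bodies walk m_slowCases with these helpers, consuming entries
    // in exactly the order the fast path appended them, e.g. (illustrative):
    //
    //     void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    //     {
    //         linkSlowCase(iter); // bind the fast path's bail-out jump here
    //         ...
    //     }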

    void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int virtualRegisterIndex);

    Jump checkStructure(RegisterID reg, Structure* structure);

    void restoreArgumentReferenceForTrampoline();
    void updateTopCallFrame();

    Call emitNakedCall(CodePtr function = CodePtr());

    // Loads the character value of a single character string into dst.
    void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);

#if ENABLE(DFG_JIT)
    void emitEnterOptimizationCheck();
#else
    void emitEnterOptimizationCheck() { }
#endif

#ifndef NDEBUG
    void printBytecodeOperandTypes(unsigned src1, unsigned src2);
#endif

#if ENABLE(SAMPLING_FLAGS)
    void setSamplingFlag(int32_t);
    void clearSamplingFlag(int32_t);
#endif

#if ENABLE(SAMPLING_COUNTERS)
    void emitCount(AbstractSamplingCounter&, int32_t = 1);
#endif

#if ENABLE(OPCODE_SAMPLING)
    void sampleInstruction(Instruction*, bool = false);
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
    void sampleCodeBlock(CodeBlock*);
#else
    void sampleCodeBlock(CodeBlock*) { }
#endif

#if ENABLE(DFG_JIT)
    bool canBeOptimized() { return m_canBeOptimized; }
    bool canBeOptimizedOrInlined() { return m_canBeOptimizedOrInlined; }
    bool shouldEmitProfiling() { return m_shouldEmitProfiling; }
#else
    bool canBeOptimized() { return false; }
    bool canBeOptimizedOrInlined() { return false; }
    // Enables use of value profiler with tiered compilation turned off,
    // in which case all code gets profiled.
    bool shouldEmitProfiling() { return false; }
#endif

    Interpreter* m_interpreter;
    CodeBlock* m_codeBlock;

    Vector<CallRecord> m_calls;
    Vector<Label> m_labels;
    Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
    Vector<ByValCompilationInfo> m_byValCompilationInfo;
    Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
    Vector<JumpTable> m_jmpTable;

    unsigned m_bytecodeOffset;
    Vector<SlowCaseEntry> m_slowCases;
    Vector<SwitchRecord> m_switches;

    unsigned m_propertyAccessInstructionIndex;
    unsigned m_byValInstructionIndex;
    unsigned m_globalResolveInfoIndex;
    unsigned m_callLinkInfoIndex;

#if USE(JSVALUE32_64)
    unsigned m_jumpTargetIndex;
    unsigned m_mappedBytecodeOffset;
    int m_mappedVirtualRegisterIndex;
    RegisterID m_mappedTag;
    RegisterID m_mappedPayload;
#else
    int m_lastResultBytecodeRegister;
#endif
    unsigned m_jumpTargetsPosition;

#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
    Label m_uninterruptedInstructionSequenceBegin;
    int m_uninterruptedConstantSequenceBegin;
#endif

    OwnPtr<JITDisassembler> m_disassembler;
    RefPtr<Profiler::Compilation> m_compilation;
    WeakRandom m_randomGenerator;
    static CodeRef stringGetByValStubGenerator(VM*);

#if ENABLE(VALUE_PROFILER)
    bool m_canBeOptimized;
    bool m_canBeOptimizedOrInlined;
    bool m_shouldEmitProfiling;
#endif
} JIT_CLASS_ALIGNMENT;

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JIT_h