/*
 * Copyright (C) 2011 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
26 #ifndef DFGAssemblyHelpers_h
27 #define DFGAssemblyHelpers_h
29 #include <wtf/Platform.h>
33 #include "CodeBlock.h"
34 #include "DFGFPRInfo.h"
35 #include "DFGGPRInfo.h"
37 #include "JSGlobalData.h"
38 #include "MacroAssembler.h"
40 namespace JSC
{ namespace DFG
{
42 typedef void (*V_DFGDebugOperation_EP
)(ExecState
*, void*);
44 class AssemblyHelpers
: public MacroAssembler
{
46 AssemblyHelpers(JSGlobalData
* globalData
, CodeBlock
* codeBlock
)
47 : m_globalData(globalData
)
48 , m_codeBlock(codeBlock
)
49 , m_baselineCodeBlock(codeBlock
->baselineVersion())
52 ASSERT(m_baselineCodeBlock
);
53 ASSERT(!m_baselineCodeBlock
->alternative());
54 ASSERT(m_baselineCodeBlock
->getJITType() == JITCode::BaselineJIT
);
57 CodeBlock
* codeBlock() { return m_codeBlock
; }
58 JSGlobalData
* globalData() { return m_globalData
; }
59 AssemblerType_T
& assembler() { return m_assembler
; }
61 #if CPU(X86_64) || CPU(X86)
62 void preserveReturnAddressAfterCall(GPRReg reg
)
67 void restoreReturnAddressBeforeReturn(GPRReg reg
)
72 void restoreReturnAddressBeforeReturn(Address address
)
76 #endif // CPU(X86_64) || CPU(X86)
79 ALWAYS_INLINE
void preserveReturnAddressAfterCall(RegisterID reg
)
81 move(linkRegister
, reg
);
84 ALWAYS_INLINE
void restoreReturnAddressBeforeReturn(RegisterID reg
)
86 move(reg
, linkRegister
);
89 ALWAYS_INLINE
void restoreReturnAddressBeforeReturn(Address address
)
91 loadPtr(address
, linkRegister
);
95 void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry
, GPRReg to
)
97 loadPtr(Address(GPRInfo::callFrameRegister
, entry
* sizeof(Register
)), to
);
99 void emitPutToCallFrameHeader(GPRReg from
, RegisterFile::CallFrameHeaderEntry entry
)
101 storePtr(from
, Address(GPRInfo::callFrameRegister
, entry
* sizeof(Register
)));
104 void emitPutImmediateToCallFrameHeader(void* value
, RegisterFile::CallFrameHeaderEntry entry
)
106 storePtr(TrustedImmPtr(value
), Address(GPRInfo::callFrameRegister
, entry
* sizeof(Register
)));
109 Jump
branchIfNotCell(GPRReg reg
)
112 return branchTestPtr(MacroAssembler::NonZero
, reg
, GPRInfo::tagMaskRegister
);
114 return branch32(MacroAssembler::NotEqual
, reg
, TrustedImm32(JSValue::CellTag
));
118 static Address
addressForGlobalVar(GPRReg global
, int32_t varNumber
)
120 return Address(global
, varNumber
* sizeof(Register
));
123 static Address
tagForGlobalVar(GPRReg global
, int32_t varNumber
)
125 return Address(global
, varNumber
* sizeof(Register
) + OBJECT_OFFSETOF(EncodedValueDescriptor
, asBits
.tag
));
128 static Address
payloadForGlobalVar(GPRReg global
, int32_t varNumber
)
130 return Address(global
, varNumber
* sizeof(Register
) + OBJECT_OFFSETOF(EncodedValueDescriptor
, asBits
.payload
));
133 static Address
addressFor(VirtualRegister virtualRegister
)
135 return Address(GPRInfo::callFrameRegister
, virtualRegister
* sizeof(Register
));
138 static Address
tagFor(VirtualRegister virtualRegister
)
140 return Address(GPRInfo::callFrameRegister
, virtualRegister
* sizeof(Register
) + OBJECT_OFFSETOF(EncodedValueDescriptor
, asBits
.tag
));
143 static Address
payloadFor(VirtualRegister virtualRegister
)
145 return Address(GPRInfo::callFrameRegister
, virtualRegister
* sizeof(Register
) + OBJECT_OFFSETOF(EncodedValueDescriptor
, asBits
.payload
));
148 Jump
branchIfNotObject(GPRReg structureReg
)
150 return branch8(Below
, Address(structureReg
, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType
));
153 static GPRReg
selectScratchGPR(GPRReg preserve1
= InvalidGPRReg
, GPRReg preserve2
= InvalidGPRReg
, GPRReg preserve3
= InvalidGPRReg
, GPRReg preserve4
= InvalidGPRReg
)
155 if (preserve1
!= GPRInfo::regT0
&& preserve2
!= GPRInfo::regT0
&& preserve3
!= GPRInfo::regT0
&& preserve4
!= GPRInfo::regT0
)
156 return GPRInfo::regT0
;
158 if (preserve1
!= GPRInfo::regT1
&& preserve2
!= GPRInfo::regT1
&& preserve3
!= GPRInfo::regT1
&& preserve4
!= GPRInfo::regT1
)
159 return GPRInfo::regT1
;
161 if (preserve1
!= GPRInfo::regT2
&& preserve2
!= GPRInfo::regT2
&& preserve3
!= GPRInfo::regT2
&& preserve4
!= GPRInfo::regT2
)
162 return GPRInfo::regT2
;
164 if (preserve1
!= GPRInfo::regT3
&& preserve2
!= GPRInfo::regT3
&& preserve3
!= GPRInfo::regT3
&& preserve4
!= GPRInfo::regT3
)
165 return GPRInfo::regT3
;
167 return GPRInfo::regT4
;
170 // Add a debug call. This call has no effect on JIT code execution state.
171 void debugCall(V_DFGDebugOperation_EP function
, void* argument
)
173 size_t scratchSize
= sizeof(EncodedJSValue
) * (GPRInfo::numberOfRegisters
+ FPRInfo::numberOfRegisters
);
174 ScratchBuffer
* scratchBuffer
= m_globalData
->scratchBufferForSize(scratchSize
);
175 EncodedJSValue
* buffer
= static_cast<EncodedJSValue
*>(scratchBuffer
->dataBuffer());
177 for (unsigned i
= 0; i
< GPRInfo::numberOfRegisters
; ++i
)
178 storePtr(GPRInfo::toRegister(i
), buffer
+ i
);
179 for (unsigned i
= 0; i
< FPRInfo::numberOfRegisters
; ++i
) {
180 move(TrustedImmPtr(buffer
+ GPRInfo::numberOfRegisters
+ i
), GPRInfo::regT0
);
181 storeDouble(FPRInfo::toRegister(i
), GPRInfo::regT0
);
184 // Tell GC mark phase how much of the scratch buffer is active during call.
185 move(TrustedImmPtr(scratchBuffer
->activeLengthPtr()), GPRInfo::regT0
);
186 storePtr(TrustedImmPtr(scratchSize
), GPRInfo::regT0
);
188 #if CPU(X86_64) || CPU(ARM_THUMB2)
189 move(TrustedImmPtr(argument
), GPRInfo::argumentGPR1
);
190 move(GPRInfo::callFrameRegister
, GPRInfo::argumentGPR0
);
191 GPRReg scratch
= selectScratchGPR(GPRInfo::argumentGPR0
, GPRInfo::argumentGPR1
);
193 poke(GPRInfo::callFrameRegister
, 0);
194 poke(TrustedImmPtr(argument
), 1);
195 GPRReg scratch
= GPRInfo::regT0
;
197 #error "DFG JIT not supported on this platform."
199 move(TrustedImmPtr(reinterpret_cast<void*>(function
)), scratch
);
202 move(TrustedImmPtr(scratchBuffer
->activeLengthPtr()), GPRInfo::regT0
);
203 storePtr(TrustedImmPtr(0), GPRInfo::regT0
);
205 for (unsigned i
= 0; i
< FPRInfo::numberOfRegisters
; ++i
) {
206 move(TrustedImmPtr(buffer
+ GPRInfo::numberOfRegisters
+ i
), GPRInfo::regT0
);
207 loadDouble(GPRInfo::regT0
, FPRInfo::toRegister(i
));
209 for (unsigned i
= 0; i
< GPRInfo::numberOfRegisters
; ++i
)
210 loadPtr(buffer
+ i
, GPRInfo::toRegister(i
));
213 // These methods JIT generate dynamic, debug-only checks - akin to ASSERTs.
214 #if DFG_ENABLE(JIT_ASSERT)
215 void jitAssertIsInt32(GPRReg
);
216 void jitAssertIsJSInt32(GPRReg
);
217 void jitAssertIsJSNumber(GPRReg
);
218 void jitAssertIsJSDouble(GPRReg
);
219 void jitAssertIsCell(GPRReg
);
220 void jitAssertHasValidCallFrame();
222 void jitAssertIsInt32(GPRReg
) { }
223 void jitAssertIsJSInt32(GPRReg
) { }
224 void jitAssertIsJSNumber(GPRReg
) { }
225 void jitAssertIsJSDouble(GPRReg
) { }
226 void jitAssertIsCell(GPRReg
) { }
227 void jitAssertHasValidCallFrame() { }
230 // These methods convert between doubles, and doubles boxed and JSValues.
232 GPRReg
boxDouble(FPRReg fpr
, GPRReg gpr
)
234 moveDoubleToPtr(fpr
, gpr
);
235 subPtr(GPRInfo::tagTypeNumberRegister
, gpr
);
236 jitAssertIsJSDouble(gpr
);
239 FPRReg
unboxDouble(GPRReg gpr
, FPRReg fpr
)
241 jitAssertIsJSDouble(gpr
);
242 addPtr(GPRInfo::tagTypeNumberRegister
, gpr
);
243 movePtrToDouble(gpr
, fpr
);
248 #if USE(JSVALUE32_64) && CPU(X86)
249 void boxDouble(FPRReg fpr
, GPRReg tagGPR
, GPRReg payloadGPR
)
251 movePackedToInt32(fpr
, payloadGPR
);
252 rshiftPacked(TrustedImm32(32), fpr
);
253 movePackedToInt32(fpr
, tagGPR
);
255 void unboxDouble(GPRReg tagGPR
, GPRReg payloadGPR
, FPRReg fpr
, FPRReg scratchFPR
)
257 jitAssertIsJSDouble(tagGPR
);
258 moveInt32ToPacked(payloadGPR
, fpr
);
259 moveInt32ToPacked(tagGPR
, scratchFPR
);
260 lshiftPacked(TrustedImm32(32), scratchFPR
);
261 orPacked(scratchFPR
, fpr
);
265 #if USE(JSVALUE32_64) && CPU(ARM)
266 void boxDouble(FPRReg fpr
, GPRReg tagGPR
, GPRReg payloadGPR
)
268 m_assembler
.vmov(payloadGPR
, tagGPR
, fpr
);
270 void unboxDouble(GPRReg tagGPR
, GPRReg payloadGPR
, FPRReg fpr
, FPRReg scratchFPR
)
272 jitAssertIsJSDouble(tagGPR
);
273 UNUSED_PARAM(scratchFPR
);
274 m_assembler
.vmov(fpr
, payloadGPR
, tagGPR
);
278 enum ExceptionCheckKind
{ NormalExceptionCheck
, InvertedExceptionCheck
};
279 Jump
emitExceptionCheck(ExceptionCheckKind kind
= NormalExceptionCheck
)
282 return branchTestPtr(kind
== NormalExceptionCheck
? NonZero
: Zero
, AbsoluteAddress(&globalData()->exception
));
283 #elif USE(JSVALUE32_64)
284 return branch32(kind
== NormalExceptionCheck
? NotEqual
: Equal
, AbsoluteAddress(reinterpret_cast<char*>(&globalData()->exception
) + OBJECT_OFFSETOF(JSValue
, u
.asBits
.tag
)), TrustedImm32(JSValue::EmptyValueTag
));
288 #if ENABLE(SAMPLING_COUNTERS)
289 static void emitCount(MacroAssembler
& jit
, AbstractSamplingCounter
& counter
, int32_t increment
= 1)
291 jit
.add64(TrustedImm32(increment
), AbsoluteAddress(counter
.addressOfCounter()));
293 void emitCount(AbstractSamplingCounter
& counter
, int32_t increment
= 1)
295 add64(TrustedImm32(increment
), AbsoluteAddress(counter
.addressOfCounter()));
299 #if ENABLE(SAMPLING_FLAGS)
300 void setSamplingFlag(int32_t);
301 void clearSamplingFlag(int32_t flag
);
304 JSGlobalObject
* globalObjectFor(CodeOrigin codeOrigin
)
306 return codeBlock()->globalObjectFor(codeOrigin
);
309 JSObject
* globalThisObjectFor(CodeOrigin codeOrigin
)
311 JSGlobalObject
* object
= globalObjectFor(codeOrigin
);
312 return object
->methodTable()->toThisObject(object
, 0);
315 bool strictModeFor(CodeOrigin codeOrigin
)
317 if (!codeOrigin
.inlineCallFrame
)
318 return codeBlock()->isStrictMode();
319 return codeOrigin
.inlineCallFrame
->callee
->jsExecutable()->isStrictMode();
322 CodeBlock
* baselineCodeBlockFor(const CodeOrigin
& codeOrigin
)
324 return baselineCodeBlockForOriginAndBaselineCodeBlock(codeOrigin
, baselineCodeBlock());
327 CodeBlock
* baselineCodeBlock()
329 return m_baselineCodeBlock
;
332 Vector
<BytecodeAndMachineOffset
>& decodedCodeMapFor(CodeBlock
*);
334 static const double twoToThe32
;
337 JSGlobalData
* m_globalData
;
338 CodeBlock
* m_codeBlock
;
339 CodeBlock
* m_baselineCodeBlock
;
341 HashMap
<CodeBlock
*, Vector
<BytecodeAndMachineOffset
> > m_decodedCodeMaps
;
344 } } // namespace JSC::DFG
346 #endif // ENABLE(DFG_JIT)
348 #endif // DFGAssemblyHelpers_h