/*
 * Copyright (C) 2010 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
26 #ifndef SpecializedThunkJIT_h
27 #define SpecializedThunkJIT_h
31 #include "Executable.h"
33 #include "JITInlines.h"
34 #include "JSInterfaceJIT.h"
36 #include "LinkBuffer.h"
40 class SpecializedThunkJIT
: public JSInterfaceJIT
{
42 static const int ThisArgument
= -1;
43 SpecializedThunkJIT(VM
* vm
, int expectedArgCount
)
46 emitFunctionPrologue();
47 // Check that we have the expected number of arguments
48 m_failures
.append(branch32(NotEqual
, payloadFor(JSStack::ArgumentCount
), TrustedImm32(expectedArgCount
+ 1)));
51 explicit SpecializedThunkJIT(VM
* vm
)
54 emitFunctionPrologue();
57 void loadDoubleArgument(int argument
, FPRegisterID dst
, RegisterID scratch
)
59 unsigned src
= CallFrame::argumentOffset(argument
);
60 m_failures
.append(emitLoadDouble(src
, dst
, scratch
));
63 void loadCellArgument(int argument
, RegisterID dst
)
65 unsigned src
= CallFrame::argumentOffset(argument
);
66 m_failures
.append(emitLoadJSCell(src
, dst
));
69 void loadJSStringArgument(VM
& vm
, int argument
, RegisterID dst
)
71 loadCellArgument(argument
, dst
);
72 m_failures
.append(branchStructure(*this, NotEqual
,
73 Address(dst
, JSCell::structureIDOffset()),
74 vm
.stringStructure
.get()));
77 void loadArgumentWithSpecificClass(const ClassInfo
* classInfo
, int argument
, RegisterID dst
, RegisterID scratch
)
79 loadCellArgument(argument
, dst
);
80 emitLoadStructure(dst
, scratch
, dst
);
81 appendFailure(branchPtr(NotEqual
, Address(scratch
, Structure::classInfoOffset()), TrustedImmPtr(classInfo
)));
82 // We have to reload the argument since emitLoadStructure clobbered it.
83 loadCellArgument(argument
, dst
);
86 void loadInt32Argument(int argument
, RegisterID dst
, Jump
& failTarget
)
88 unsigned src
= CallFrame::argumentOffset(argument
);
89 failTarget
= emitLoadInt32(src
, dst
);
92 void loadInt32Argument(int argument
, RegisterID dst
)
94 Jump conversionFailed
;
95 loadInt32Argument(argument
, dst
, conversionFailed
);
96 m_failures
.append(conversionFailed
);
99 void appendFailure(const Jump
& failure
)
101 m_failures
.append(failure
);
104 void returnJSValue(RegisterID src
)
108 emitFunctionEpilogue();
112 void returnJSValue(RegisterID payload
, RegisterID tag
)
114 ASSERT_UNUSED(payload
, payload
== regT0
);
115 ASSERT_UNUSED(tag
, tag
== regT1
);
116 emitFunctionEpilogue();
121 void returnDouble(FPRegisterID src
)
124 moveDoubleTo64(src
, regT0
);
125 Jump zero
= branchTest64(Zero
, regT0
);
126 sub64(tagTypeNumberRegister
, regT0
);
129 move(tagTypeNumberRegister
, regT0
);
133 // The src register is not clobbered by moveDoubleToInts with ARM, MIPS and SH4 macro assemblers, so let's use it.
134 moveDoubleToInts(src
, regT0
, regT1
);
136 storeDouble(src
, Address(stackPointerRegister
, -(int)sizeof(double)));
137 loadPtr(Address(stackPointerRegister
, OBJECT_OFFSETOF(JSValue
, u
.asBits
.tag
) - sizeof(double)), regT1
);
138 loadPtr(Address(stackPointerRegister
, OBJECT_OFFSETOF(JSValue
, u
.asBits
.payload
) - sizeof(double)), regT0
);
140 Jump lowNonZero
= branchTestPtr(NonZero
, regT1
);
141 Jump highNonZero
= branchTestPtr(NonZero
, regT0
);
142 move(TrustedImm32(0), regT0
);
143 move(TrustedImm32(Int32Tag
), regT1
);
144 lowNonZero
.link(this);
145 highNonZero
.link(this);
147 emitFunctionEpilogue();
151 void returnInt32(RegisterID src
)
156 emitFunctionEpilogue();
160 void returnJSCell(RegisterID src
)
165 emitFunctionEpilogue();
169 MacroAssemblerCodeRef
finalize(MacroAssemblerCodePtr fallback
, const char* thunkKind
)
171 LinkBuffer
patchBuffer(*m_vm
, *this, GLOBAL_THUNK_ID
);
172 patchBuffer
.link(m_failures
, CodeLocationLabel(fallback
));
173 for (unsigned i
= 0; i
< m_calls
.size(); i
++)
174 patchBuffer
.link(m_calls
[i
].first
, m_calls
[i
].second
);
175 return FINALIZE_CODE(patchBuffer
, ("Specialized thunk for %s", thunkKind
));
178 // Assumes that the target function uses fpRegister0 as the first argument
179 // and return value. Like any sensible architecture would.
180 void callDoubleToDouble(FunctionPtr function
)
182 m_calls
.append(std::make_pair(call(), function
));
185 void callDoubleToDoublePreservingReturn(FunctionPtr function
)
188 preserveReturnAddressAfterCall(regT3
);
189 callDoubleToDouble(function
);
191 restoreReturnAddressBeforeReturn(regT3
);
196 void tagReturnAsInt32()
199 or64(tagTypeNumberRegister
, regT0
);
201 move(TrustedImm32(JSValue::Int32Tag
), regT1
);
205 void tagReturnAsJSCell()
207 #if USE(JSVALUE32_64)
208 move(TrustedImm32(JSValue::CellTag
), regT1
);
212 MacroAssembler::JumpList m_failures
;
213 Vector
<std::pair
<Call
, FunctionPtr
>> m_calls
;
218 #endif // ENABLE(JIT)
220 #endif // SpecializedThunkJIT_h