/*
 * Copyright (C) 2010 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef SpecializedThunkJIT_h
#define SpecializedThunkJIT_h

#if ENABLE(JIT)

#include "Executable.h"
#include "JSInterfaceJIT.h"
#include "LinkBuffer.h"

namespace JSC {

    class SpecializedThunkJIT : public JSInterfaceJIT {
    public:
        static const int ThisArgument = -1;

        SpecializedThunkJIT(int expectedArgCount)
        {
            // Check that we have the expected number of arguments (+ 1 because
            // the ArgumentCount slot also counts the 'this' value).
            m_failures.append(branch32(NotEqual, payloadFor(JSStack::ArgumentCount), TrustedImm32(expectedArgCount + 1)));
        }
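
        // Every guard appended to m_failures is bound in finalize() below,
        // which links the whole JumpList to a single fallback entry point.
        // A thunk that sees an unexpected argument count or argument type
        // therefore bails out to the generic native call rather than failing.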

        void loadDoubleArgument(int argument, FPRegisterID dst, RegisterID scratch)
        {
            unsigned src = CallFrame::argumentOffset(argument);
            m_failures.append(emitLoadDouble(src, dst, scratch));
        }

        void loadCellArgument(int argument, RegisterID dst)
        {
            unsigned src = CallFrame::argumentOffset(argument);
            m_failures.append(emitLoadJSCell(src, dst));
        }

        void loadJSStringArgument(VM& vm, int argument, RegisterID dst)
        {
            loadCellArgument(argument, dst);
            // Fail unless the cell's structure is the VM's string structure.
            m_failures.append(branchPtr(NotEqual, Address(dst, JSCell::structureOffset()), TrustedImmPtr(vm.stringStructure.get())));
        }

        void loadInt32Argument(int argument, RegisterID dst, Jump& failTarget)
        {
            unsigned src = CallFrame::argumentOffset(argument);
            failTarget = emitLoadInt32(src, dst);
        }

        void loadInt32Argument(int argument, RegisterID dst)
        {
            Jump conversionFailed;
            loadInt32Argument(argument, dst, conversionFailed);
            m_failures.append(conversionFailed);
        }

        void appendFailure(const Jump& failure)
        {
            m_failures.append(failure);
        }

        void returnJSValue(RegisterID src)
        {
            if (src != regT0)
                move(src, regT0);
            loadPtr(payloadFor(JSStack::CallerFrame, callFrameRegister), callFrameRegister);
            ret();
        }

        void returnDouble(FPRegisterID src)
        {
#if USE(JSVALUE64)
            // Box the double by subtracting the number tag, which is equivalent
            // to adding the double-encode offset. A raw bit pattern of zero
            // (+0.0) is special-cased and returned as the boxed int32 zero.
            moveDoubleTo64(src, regT0);
            Jump zero = branchTest64(Zero, regT0);
            sub64(tagTypeNumberRegister, regT0);
            Jump done = jump();
            zero.link(this);
            move(tagTypeNumberRegister, regT0);
            done.link(this);
#else
            // Spill the double just below the stack pointer, then reload its
            // tag and payload words. If both words are zero (+0.0), re-tag the
            // result as the int32 zero.
            storeDouble(src, Address(stackPointerRegister, -(int)sizeof(double)));
            loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(JSValue, u.asBits.tag) - sizeof(double)), regT1);
            loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(JSValue, u.asBits.payload) - sizeof(double)), regT0);
            Jump lowNonZero = branchTestPtr(NonZero, regT1);
            Jump highNonZero = branchTestPtr(NonZero, regT0);
            move(TrustedImm32(0), regT0);
            move(TrustedImm32(Int32Tag), regT1);
            lowNonZero.link(this);
            highNonZero.link(this);
#endif
            loadPtr(payloadFor(JSStack::CallerFrame, callFrameRegister), callFrameRegister);
            ret();
        }

        void returnInt32(RegisterID src)
        {
            if (src != regT0)
                move(src, regT0);
            tagReturnAsInt32();
            loadPtr(payloadFor(JSStack::CallerFrame, callFrameRegister), callFrameRegister);
            ret();
        }

        void returnJSCell(RegisterID src)
        {
            if (src != regT0)
                move(src, regT0);
            tagReturnAsJSCell();
            loadPtr(payloadFor(JSStack::CallerFrame, callFrameRegister), callFrameRegister);
            ret();
        }

        MacroAssemblerCodeRef finalize(VM& vm, MacroAssemblerCodePtr fallback, const char* thunkKind)
        {
            LinkBuffer patchBuffer(vm, this, GLOBAL_THUNK_ID);
            patchBuffer.link(m_failures, CodeLocationLabel(fallback));
            for (unsigned i = 0; i < m_calls.size(); i++)
                patchBuffer.link(m_calls[i].first, m_calls[i].second);
            return FINALIZE_CODE(patchBuffer, ("Specialized thunk for %s", thunkKind));
        }
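
        // Usage sketch (illustrative, not part of this header; loosely modeled
        // on the generators in ThunkGenerators.cpp): a generator builds a
        // thunk and finalizes it with the generic native call as the fallback
        // that all failure jumps are bound to, roughly like this:
        //
        //     SpecializedThunkJIT jit(1); // one named argument expected
        //     jit.loadDoubleArgument(0, SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::regT0);
        //     jit.sqrtDouble(SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::fpRegT0);
        //     jit.returnDouble(SpecializedThunkJIT::fpRegT0);
        //     return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "sqrt");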

        // Assumes that the target function uses fpRegister0 as the first argument
        // and return value, as any sensible architecture would.
        void callDoubleToDouble(FunctionPtr function)
        {
            m_calls.append(std::make_pair(call(), function));
        }

        void callDoubleToDoublePreservingReturn(FunctionPtr function)
        {
            // On non-x86 targets the return address lives in a register and
            // must be saved around the call; on x86 it is already on the stack.
            if (!isX86())
                preserveReturnAddressAfterCall(regT3);
            callDoubleToDouble(function);
            if (!isX86())
                restoreReturnAddressBeforeReturn(regT3);
        }
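
        // Usage sketch (illustrative; the wrapper name follows the
        // ThunkGenerators.cpp pattern and is not defined in this header).
        // A Math.exp-style thunk might drive this helper as:
        //
        //     SpecializedThunkJIT jit(1);
        //     jit.loadDoubleArgument(0, SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::regT0);
        //     jit.callDoubleToDoublePreservingReturn(UnaryDoubleOpWrapper(exp));
        //     jit.returnDouble(SpecializedThunkJIT::fpRegT0);
        //
        // where the wrapped function takes and returns its double in fpRegister0.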

    private:
        void tagReturnAsInt32()
        {
#if USE(JSVALUE64)
            or64(tagTypeNumberRegister, regT0);
#else
            move(TrustedImm32(JSValue::Int32Tag), regT1);
#endif
        }

        void tagReturnAsJSCell()
        {
#if USE(JSVALUE32_64)
            move(TrustedImm32(JSValue::CellTag), regT1);
#endif
            // On JSVALUE64, a cell pointer is already a valid JSValue
            // encoding, so no tagging is required.
        }

        MacroAssembler::JumpList m_failures;
        Vector<std::pair<Call, FunctionPtr> > m_calls;
    };

} // namespace JSC

#endif // ENABLE(JIT)

#endif // SpecializedThunkJIT_h