/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef JITCode_h
#define JITCode_h

#if ENABLE(JIT)

#include "CallFrame.h"
#include "JITStubs.h"
#include "MacroAssemblerCodeRef.h"
#include "Profiler.h"

namespace JSC {

    class JSGlobalData;
    class RegisterFile;

    class JITCode {
        typedef MacroAssemblerCodeRef CodeRef;
        typedef MacroAssemblerCodePtr CodePtr;
    public:
        enum JITType { None, HostCallThunk, InterpreterThunk, BaselineJIT, DFGJIT };
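        // The enumerators are ordered from "no code" through successively hotter
        // tiers; the helpers below rely on BaselineJIT and DFGJIT being the
        // bottom and top of that ladder.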

        static JITType bottomTierJIT()
        {
            return BaselineJIT;
        }

        static JITType topTierJIT()
        {
            return DFGJIT;
        }

        static JITType nextTierJIT(JITType jitType)
        {
            ASSERT_UNUSED(jitType, jitType == BaselineJIT || jitType == DFGJIT);
            return DFGJIT;
        }

        static bool isOptimizingJIT(JITType jitType)
        {
            return jitType == DFGJIT;
        }

        static bool isBaselineCode(JITType jitType)
        {
            return jitType == InterpreterThunk || jitType == BaselineJIT;
        }
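
        // A minimal sketch of how a tier-up loop might walk these helpers; the
        // 'type' local is illustrative only, not part of this class:
        //
        //     JITCode::JITType type = JITCode::bottomTierJIT();
        //     while (!JITCode::isOptimizingJIT(type))
        //         type = JITCode::nextTierJIT(type);
        //     ASSERT(type == JITCode::topTierJIT());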

        JITCode()
            : m_jitType(None)
        {
        }

        JITCode(const CodeRef ref, JITType jitType)
            : m_ref(ref)
            , m_jitType(jitType)
        {
            ASSERT(jitType != None);
        }
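
        // A default-constructed JITCode holds a null CodeRef; operator! below
        // reports that "no code" state.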
        bool operator !() const
        {
            return !m_ref;
        }

        CodePtr addressForCall()
        {
            return m_ref.code();
        }

        void* executableAddressAtOffset(size_t offset) const
        {
            ASSERT(offset < size());
            return reinterpret_cast<char*>(m_ref.code().executableAddress()) + offset;
        }

        void* dataAddressAtOffset(size_t offset) const
        {
            ASSERT(offset < size());
            return reinterpret_cast<char*>(m_ref.code().dataLocation()) + offset;
        }
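
        // Note: executableAddress() and dataLocation() can differ on targets
        // where the callable address carries an ISA flag (e.g. the Thumb bit
        // on ARMv7), so offsets must be applied to the matching base above.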

        // This function returns the offset in bytes of 'pointerIntoCode' into
        // this block of code. The pointer provided must be a pointer into this
        // block of code. It is ASSERTed that no code block is >4GB in size.
        unsigned offsetOf(void* pointerIntoCode)
        {
            intptr_t result = reinterpret_cast<intptr_t>(pointerIntoCode) - reinterpret_cast<intptr_t>(m_ref.code().executableAddress());
            ASSERT(static_cast<intptr_t>(static_cast<unsigned>(result)) == result);
            return static_cast<unsigned>(result);
        }
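
        // Execute the code! Dispatches into the generated machine code via the
        // CTI trampoline; if a JS exception is pending on return, the result is
        // discarded and jsNull() is returned instead.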
        inline JSValue execute(RegisterFile* registerFile, CallFrame* callFrame, JSGlobalData* globalData)
        {
            JSValue result = JSValue::decode(ctiTrampoline(m_ref.code().executableAddress(), registerFile, callFrame, 0, Profiler::enabledProfilerReference(), globalData));
            return globalData->exception ? jsNull() : result;
        }

        void* start() const
        {
            return m_ref.code().dataLocation();
        }

        size_t size() const
        {
            ASSERT(m_ref.code().executableAddress());
            return m_ref.size();
        }

        ExecutableMemoryHandle* getExecutableMemory()
        {
            return m_ref.executableMemory();
        }

        JITType jitType() const
        {
            return m_jitType;
        }

        // Host functions are a bit special; they have an m_code pointer but they
        // do not individually ref the executable pool containing the trampoline.
        static JITCode HostFunction(CodeRef code)
        {
            return JITCode(code, HostCallThunk);
        }
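
        // A hypothetical call site ('thunk' is illustrative only):
        //
        //     JITCode hostCode = JITCode::HostFunction(thunk);
        //     ASSERT(hostCode.jitType() == JITCode::HostCallThunk);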

        void clear()
        {
            // Destroy the CodeRef in place, then re-construct an empty one so
            // this JITCode no longer keeps its executable memory alive.
            m_ref.~CodeRef();
            new (NotNull, &m_ref) CodeRef();
        }

    private:
        JITCode(PassRefPtr<ExecutableMemoryHandle> executableMemory, JITType jitType)
            : m_ref(executableMemory)
            , m_jitType(jitType)
        {
        }

        CodeRef m_ref;
        JITType m_jitType;
    };

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JITCode_h