/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef MacroAssemblerCodeRef_h
#define MacroAssemblerCodeRef_h

#include "ExecutableAllocator.h"
#include "PassRefPtr.h"
#include "RefPtr.h"
#include "UnusedParam.h"
36 // ASSERT_VALID_CODE_POINTER checks that ptr is a non-null pointer, and that it is a valid
37 // instruction address on the platform (for example, check any alignment requirements).
39 // ARM/thumb instructions must be 16-bit aligned, but all code pointers to be loaded
40 // into the processor are decorated with the bottom bit set, indicating that this is
41 // thumb code (as oposed to 32-bit traditional ARM). The first test checks for both
42 // decorated and undectorated null, and the second test ensures that the pointer is
44 #define ASSERT_VALID_CODE_POINTER(ptr) \
45 ASSERT(reinterpret_cast<intptr_t>(ptr) & ~1); \
46 ASSERT(reinterpret_cast<intptr_t>(ptr) & 1)
47 #define ASSERT_VALID_CODE_OFFSET(offset) \
48 ASSERT(!(offset & 1)) // Must be multiple of 2.
50 #define ASSERT_VALID_CODE_POINTER(ptr) \
52 #define ASSERT_VALID_CODE_OFFSET(offset) // Anything goes!
59 // FunctionPtr should be used to wrap pointers to C/C++ functions in JSC
60 // (particularly, the stub functions).
68 template<typename returnType
>
69 FunctionPtr(returnType(*value
)())
70 : m_value((void*)value
)
72 ASSERT_VALID_CODE_POINTER(m_value
);
75 template<typename returnType
, typename argType1
>
76 FunctionPtr(returnType(*value
)(argType1
))
77 : m_value((void*)value
)
79 ASSERT_VALID_CODE_POINTER(m_value
);
82 template<typename returnType
, typename argType1
, typename argType2
>
83 FunctionPtr(returnType(*value
)(argType1
, argType2
))
84 : m_value((void*)value
)
86 ASSERT_VALID_CODE_POINTER(m_value
);
89 template<typename returnType
, typename argType1
, typename argType2
, typename argType3
>
90 FunctionPtr(returnType(*value
)(argType1
, argType2
, argType3
))
91 : m_value((void*)value
)
93 ASSERT_VALID_CODE_POINTER(m_value
);
96 template<typename returnType
, typename argType1
, typename argType2
, typename argType3
, typename argType4
>
97 FunctionPtr(returnType(*value
)(argType1
, argType2
, argType3
, argType4
))
98 : m_value((void*)value
)
100 ASSERT_VALID_CODE_POINTER(m_value
);
103 template<typename FunctionType
>
104 explicit FunctionPtr(FunctionType
* value
)
105 // Using a C-ctyle cast here to avoid compiler error on RVTC:
106 // Error: #694: reinterpret_cast cannot cast away const or other type qualifiers
107 // (I guess on RVTC function pointers have a different constness to GCC/MSVC?)
108 : m_value((void*)value
)
110 ASSERT_VALID_CODE_POINTER(m_value
);
113 void* value() const { return m_value
; }
114 void* executableAddress() const { return m_value
; }
123 // ReturnAddressPtr should be used to wrap return addresses generated by processor
124 // 'call' instructions exectued in JIT code. We use return addresses to look up
125 // exception and optimization information, and to repatch the call instruction
126 // that is the source of the return address.
127 class ReturnAddressPtr
{
134 explicit ReturnAddressPtr(void* value
)
137 ASSERT_VALID_CODE_POINTER(m_value
);
140 explicit ReturnAddressPtr(FunctionPtr function
)
141 : m_value(function
.value())
143 ASSERT_VALID_CODE_POINTER(m_value
);
146 void* value() const { return m_value
; }
152 // MacroAssemblerCodePtr:
154 // MacroAssemblerCodePtr should be used to wrap pointers to JIT generated code.
155 class MacroAssemblerCodePtr
{
157 MacroAssemblerCodePtr()
162 explicit MacroAssemblerCodePtr(void* value
)
164 // Decorate the pointer as a thumb code pointer.
165 : m_value(reinterpret_cast<char*>(value
) + 1)
170 ASSERT_VALID_CODE_POINTER(m_value
);
173 explicit MacroAssemblerCodePtr(ReturnAddressPtr ra
)
174 : m_value(ra
.value())
176 ASSERT_VALID_CODE_POINTER(m_value
);
179 void* executableAddress() const { return m_value
; }
181 // To use this pointer as a data address remove the decoration.
182 void* dataLocation() const { ASSERT_VALID_CODE_POINTER(m_value
); return reinterpret_cast<char*>(m_value
) - 1; }
184 void* dataLocation() const { ASSERT_VALID_CODE_POINTER(m_value
); return m_value
; }
187 bool operator!() const
196 // MacroAssemblerCodeRef:
198 // A reference to a section of JIT generated code. A CodeRef consists of a
199 // pointer to the code, and a ref pointer to the pool from within which it
201 class MacroAssemblerCodeRef
{
203 MacroAssemblerCodeRef()
208 MacroAssemblerCodeRef(void* code
, PassRefPtr
<ExecutablePool
> executablePool
, size_t size
)
210 , m_executablePool(executablePool
)
215 MacroAssemblerCodePtr m_code
;
216 RefPtr
<ExecutablePool
> m_executablePool
;
222 #endif // ENABLE(ASSEMBLER)
224 #endif // MacroAssemblerCodeRef_h