1 | /* | |
2 | * Copyright (C) 2009, 2012 Apple Inc. All rights reserved. | |
3 | * | |
4 | * Redistribution and use in source and binary forms, with or without | |
5 | * modification, are permitted provided that the following conditions | |
6 | * are met: | |
7 | * 1. Redistributions of source code must retain the above copyright | |
8 | * notice, this list of conditions and the following disclaimer. | |
9 | * 2. Redistributions in binary form must reproduce the above copyright | |
10 | * notice, this list of conditions and the following disclaimer in the | |
11 | * documentation and/or other materials provided with the distribution. | |
12 | * | |
13 | * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY | |
14 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | |
15 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
16 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR | |
17 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, | |
18 | * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | |
19 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR | |
20 | * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY | |
21 | * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
22 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
23 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
24 | */ | |
25 | ||
26 | #ifndef MacroAssemblerCodeRef_h | |
27 | #define MacroAssemblerCodeRef_h | |
28 | ||
29 | #include "Disassembler.h" | |
30 | #include "ExecutableAllocator.h" | |
31 | #include "LLIntData.h" | |
32 | #include <wtf/DataLog.h> | |
33 | #include <wtf/PassRefPtr.h> | |
34 | #include <wtf/PrintStream.h> | |
35 | #include <wtf/RefPtr.h> | |
36 | ||
37 | // ASSERT_VALID_CODE_POINTER checks that ptr is a non-null pointer, and that it is a valid | |
38 | // instruction address on the platform (for example, check any alignment requirements). | |
39 | #if CPU(ARM_THUMB2) && ENABLE(JIT) | |
// ARM instructions must be 16-bit aligned. Thumb2 code pointers to be loaded
// into the processor are decorated with the bottom bit set, while traditional
// ARM has the lower bit clear. Since we don't know what kind of pointer, we
// check for both decorated and undecorated null.
44 | #define ASSERT_VALID_CODE_POINTER(ptr) \ | |
45 | ASSERT(reinterpret_cast<intptr_t>(ptr) & ~1) | |
46 | #define ASSERT_VALID_CODE_OFFSET(offset) \ | |
47 | ASSERT(!(offset & 1)) // Must be multiple of 2. | |
48 | #else | |
49 | #define ASSERT_VALID_CODE_POINTER(ptr) \ | |
50 | ASSERT(ptr) | |
51 | #define ASSERT_VALID_CODE_OFFSET(offset) // Anything goes! | |
52 | #endif | |
53 | ||
54 | #if CPU(X86) && OS(WINDOWS) | |
55 | #define CALLING_CONVENTION_IS_STDCALL 1 | |
56 | #ifndef CDECL | |
57 | #if COMPILER(MSVC) | |
58 | #define CDECL __cdecl | |
59 | #else | |
60 | #define CDECL __attribute__ ((__cdecl)) | |
61 | #endif // COMPILER(MSVC) | |
62 | #endif // CDECL | |
63 | #else | |
64 | #define CALLING_CONVENTION_IS_STDCALL 0 | |
65 | #endif | |
66 | ||
67 | #if CPU(X86) | |
68 | #define HAS_FASTCALL_CALLING_CONVENTION 1 | |
69 | #ifndef FASTCALL | |
70 | #if COMPILER(MSVC) | |
71 | #define FASTCALL __fastcall | |
72 | #else | |
73 | #define FASTCALL __attribute__ ((fastcall)) | |
74 | #endif // COMPILER(MSVC) | |
75 | #endif // FASTCALL | |
76 | #else | |
77 | #define HAS_FASTCALL_CALLING_CONVENTION 0 | |
78 | #endif // CPU(X86) | |
79 | ||
80 | namespace JSC { | |
81 | ||
82 | // FunctionPtr: | |
83 | // | |
84 | // FunctionPtr should be used to wrap pointers to C/C++ functions in JSC | |
85 | // (particularly, the stub functions). | |
86 | class FunctionPtr { | |
87 | public: | |
88 | FunctionPtr() | |
89 | : m_value(0) | |
90 | { | |
91 | } | |
92 | ||
93 | template<typename returnType> | |
94 | FunctionPtr(returnType(*value)()) | |
95 | : m_value((void*)value) | |
96 | { | |
97 | ASSERT_VALID_CODE_POINTER(m_value); | |
98 | } | |
99 | ||
100 | template<typename returnType, typename argType1> | |
101 | FunctionPtr(returnType(*value)(argType1)) | |
102 | : m_value((void*)value) | |
103 | { | |
104 | ASSERT_VALID_CODE_POINTER(m_value); | |
105 | } | |
106 | ||
107 | template<typename returnType, typename argType1, typename argType2> | |
108 | FunctionPtr(returnType(*value)(argType1, argType2)) | |
109 | : m_value((void*)value) | |
110 | { | |
111 | ASSERT_VALID_CODE_POINTER(m_value); | |
112 | } | |
113 | ||
114 | template<typename returnType, typename argType1, typename argType2, typename argType3> | |
115 | FunctionPtr(returnType(*value)(argType1, argType2, argType3)) | |
116 | : m_value((void*)value) | |
117 | { | |
118 | ASSERT_VALID_CODE_POINTER(m_value); | |
119 | } | |
120 | ||
121 | template<typename returnType, typename argType1, typename argType2, typename argType3, typename argType4> | |
122 | FunctionPtr(returnType(*value)(argType1, argType2, argType3, argType4)) | |
123 | : m_value((void*)value) | |
124 | { | |
125 | ASSERT_VALID_CODE_POINTER(m_value); | |
126 | } | |
127 | ||
128 | template<typename returnType, typename argType1, typename argType2, typename argType3, typename argType4, typename argType5> | |
129 | FunctionPtr(returnType(*value)(argType1, argType2, argType3, argType4, argType5)) | |
130 | : m_value((void*)value) | |
131 | { | |
132 | ASSERT_VALID_CODE_POINTER(m_value); | |
133 | } | |
134 | ||
135 | template<typename returnType, typename argType1, typename argType2, typename argType3, typename argType4, typename argType5, typename argType6> | |
136 | FunctionPtr(returnType(*value)(argType1, argType2, argType3, argType4, argType5, argType6)) | |
137 | : m_value((void*)value) | |
138 | { | |
139 | ASSERT_VALID_CODE_POINTER(m_value); | |
140 | } | |
141 | // MSVC doesn't seem to treat functions with different calling conventions as | |
142 | // different types; these methods already defined for fastcall, below. | |
143 | #if CALLING_CONVENTION_IS_STDCALL && !OS(WINDOWS) | |
144 | ||
145 | template<typename returnType> | |
146 | FunctionPtr(returnType (CDECL *value)()) | |
147 | : m_value((void*)value) | |
148 | { | |
149 | ASSERT_VALID_CODE_POINTER(m_value); | |
150 | } | |
151 | ||
152 | template<typename returnType, typename argType1> | |
153 | FunctionPtr(returnType (CDECL *value)(argType1)) | |
154 | : m_value((void*)value) | |
155 | { | |
156 | ASSERT_VALID_CODE_POINTER(m_value); | |
157 | } | |
158 | ||
159 | template<typename returnType, typename argType1, typename argType2> | |
160 | FunctionPtr(returnType (CDECL *value)(argType1, argType2)) | |
161 | : m_value((void*)value) | |
162 | { | |
163 | ASSERT_VALID_CODE_POINTER(m_value); | |
164 | } | |
165 | ||
166 | template<typename returnType, typename argType1, typename argType2, typename argType3> | |
167 | FunctionPtr(returnType (CDECL *value)(argType1, argType2, argType3)) | |
168 | : m_value((void*)value) | |
169 | { | |
170 | ASSERT_VALID_CODE_POINTER(m_value); | |
171 | } | |
172 | ||
173 | template<typename returnType, typename argType1, typename argType2, typename argType3, typename argType4> | |
174 | FunctionPtr(returnType (CDECL *value)(argType1, argType2, argType3, argType4)) | |
175 | : m_value((void*)value) | |
176 | { | |
177 | ASSERT_VALID_CODE_POINTER(m_value); | |
178 | } | |
179 | #endif | |
180 | ||
181 | #if HAS_FASTCALL_CALLING_CONVENTION | |
182 | ||
183 | template<typename returnType> | |
184 | FunctionPtr(returnType (FASTCALL *value)()) | |
185 | : m_value((void*)value) | |
186 | { | |
187 | ASSERT_VALID_CODE_POINTER(m_value); | |
188 | } | |
189 | ||
190 | template<typename returnType, typename argType1> | |
191 | FunctionPtr(returnType (FASTCALL *value)(argType1)) | |
192 | : m_value((void*)value) | |
193 | { | |
194 | ASSERT_VALID_CODE_POINTER(m_value); | |
195 | } | |
196 | ||
197 | template<typename returnType, typename argType1, typename argType2> | |
198 | FunctionPtr(returnType (FASTCALL *value)(argType1, argType2)) | |
199 | : m_value((void*)value) | |
200 | { | |
201 | ASSERT_VALID_CODE_POINTER(m_value); | |
202 | } | |
203 | ||
204 | template<typename returnType, typename argType1, typename argType2, typename argType3> | |
205 | FunctionPtr(returnType (FASTCALL *value)(argType1, argType2, argType3)) | |
206 | : m_value((void*)value) | |
207 | { | |
208 | ASSERT_VALID_CODE_POINTER(m_value); | |
209 | } | |
210 | ||
211 | template<typename returnType, typename argType1, typename argType2, typename argType3, typename argType4> | |
212 | FunctionPtr(returnType (FASTCALL *value)(argType1, argType2, argType3, argType4)) | |
213 | : m_value((void*)value) | |
214 | { | |
215 | ASSERT_VALID_CODE_POINTER(m_value); | |
216 | } | |
217 | #endif | |
218 | ||
219 | template<typename FunctionType> | |
220 | explicit FunctionPtr(FunctionType* value) | |
221 | // Using a C-ctyle cast here to avoid compiler error on RVTC: | |
222 | // Error: #694: reinterpret_cast cannot cast away const or other type qualifiers | |
223 | // (I guess on RVTC function pointers have a different constness to GCC/MSVC?) | |
224 | : m_value((void*)value) | |
225 | { | |
226 | ASSERT_VALID_CODE_POINTER(m_value); | |
227 | } | |
228 | ||
229 | void* value() const { return m_value; } | |
230 | void* executableAddress() const { return m_value; } | |
231 | ||
232 | ||
233 | private: | |
234 | void* m_value; | |
235 | }; | |
236 | ||
237 | // ReturnAddressPtr: | |
238 | // | |
239 | // ReturnAddressPtr should be used to wrap return addresses generated by processor | |
240 | // 'call' instructions exectued in JIT code. We use return addresses to look up | |
241 | // exception and optimization information, and to repatch the call instruction | |
242 | // that is the source of the return address. | |
243 | class ReturnAddressPtr { | |
244 | public: | |
245 | ReturnAddressPtr() | |
246 | : m_value(0) | |
247 | { | |
248 | } | |
249 | ||
250 | explicit ReturnAddressPtr(void* value) | |
251 | : m_value(value) | |
252 | { | |
253 | ASSERT_VALID_CODE_POINTER(m_value); | |
254 | } | |
255 | ||
256 | explicit ReturnAddressPtr(FunctionPtr function) | |
257 | : m_value(function.value()) | |
258 | { | |
259 | ASSERT_VALID_CODE_POINTER(m_value); | |
260 | } | |
261 | ||
262 | void* value() const { return m_value; } | |
263 | ||
264 | void dump(PrintStream& out) const | |
265 | { | |
266 | out.print(RawPointer(m_value)); | |
267 | } | |
268 | ||
269 | private: | |
270 | void* m_value; | |
271 | }; | |
272 | ||
// MacroAssemblerCodePtr:
//
// MacroAssemblerCodePtr should be used to wrap pointers to JIT generated code.
class MacroAssemblerCodePtr {
public:
    // Constructs a null code pointer.
    MacroAssemblerCodePtr()
        : m_value(0)
    {
    }

    // Wraps a raw (undecorated) code address.
    explicit MacroAssemblerCodePtr(void* value)
#if CPU(ARM_THUMB2)
        // Decorate the pointer as a thumb code pointer.
        : m_value(reinterpret_cast<char*>(value) + 1)
#else
        : m_value(value)
#endif
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    // Wraps an address that is already in executable form (i.e. already carries
    // any platform decoration, such as the Thumb bit), so nothing is added.
    static MacroAssemblerCodePtr createFromExecutableAddress(void* value)
    {
        ASSERT_VALID_CODE_POINTER(value);
        MacroAssemblerCodePtr result;
        result.m_value = value;
        return result;
    }

    // Wraps the LLInt entry point for the given opcode.
    static MacroAssemblerCodePtr createLLIntCodePtr(OpcodeID codeId)
    {
        return createFromExecutableAddress(LLInt::getCodePtr(codeId));
    }

    // A return address is already an executable address, so it is adopted as-is.
    explicit MacroAssemblerCodePtr(ReturnAddressPtr ra)
        : m_value(ra.value())
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    // The (possibly decorated) address suitable for jumping or calling into.
    void* executableAddress() const { return m_value; }
#if CPU(ARM_THUMB2)
    // To use this pointer as a data address remove the decoration.
    void* dataLocation() const { ASSERT_VALID_CODE_POINTER(m_value); return reinterpret_cast<char*>(m_value) - 1; }
#else
    void* dataLocation() const { ASSERT_VALID_CODE_POINTER(m_value); return m_value; }
#endif

    explicit operator bool() const { return m_value; }

    bool operator==(const MacroAssemblerCodePtr& other) const
    {
        return m_value == other.m_value;
    }

    // Prints "name(address)", or both the executable and data addresses when
    // they differ (i.e. when the pointer carries a Thumb decoration).
    void dumpWithName(const char* name, PrintStream& out) const
    {
        if (executableAddress() == dataLocation()) {
            out.print(name, "(", RawPointer(executableAddress()), ")");
            return;
        }
        out.print(name, "(executable = ", RawPointer(executableAddress()), ", dataLocation = ", RawPointer(dataLocation()), ")");
    }

    void dump(PrintStream& out) const
    {
        dumpWithName("CodePtr", out);
    }

    // Sentinel support so this type can serve directly as a WTF hash-table key
    // (see MacroAssemblerCodePtrHash).
    enum EmptyValueTag { EmptyValue };
    enum DeletedValueTag { DeletedValue };

    MacroAssemblerCodePtr(EmptyValueTag)
        : m_value(emptyValue())
    {
    }

    MacroAssemblerCodePtr(DeletedValueTag)
        : m_value(deletedValue())
    {
    }

    bool isEmptyValue() const { return m_value == emptyValue(); }
    bool isDeletedValue() const { return m_value == deletedValue(); }

    unsigned hash() const { return PtrHash<void*>::hash(m_value); }

private:
    // Distinct sentinel addresses (1 and 2) marking empty/deleted hash slots;
    // presumably never returned by the executable allocator — TODO confirm.
    static void* emptyValue() { return bitwise_cast<void*>(static_cast<intptr_t>(1)); }
    static void* deletedValue() { return bitwise_cast<void*>(static_cast<intptr_t>(2)); }

    void* m_value;
};
366 | ||
367 | struct MacroAssemblerCodePtrHash { | |
368 | static unsigned hash(const MacroAssemblerCodePtr& ptr) { return ptr.hash(); } | |
369 | static bool equal(const MacroAssemblerCodePtr& a, const MacroAssemblerCodePtr& b) | |
370 | { | |
371 | return a == b; | |
372 | } | |
373 | static const bool safeToCompareToEmptyOrDeleted = true; | |
374 | }; | |
375 | ||
376 | // MacroAssemblerCodeRef: | |
377 | // | |
378 | // A reference to a section of JIT generated code. A CodeRef consists of a | |
379 | // pointer to the code, and a ref pointer to the pool from within which it | |
380 | // was allocated. | |
381 | class MacroAssemblerCodeRef { | |
382 | private: | |
383 | // This is private because it's dangerous enough that we want uses of it | |
384 | // to be easy to find - hence the static create method below. | |
385 | explicit MacroAssemblerCodeRef(MacroAssemblerCodePtr codePtr) | |
386 | : m_codePtr(codePtr) | |
387 | { | |
388 | ASSERT(m_codePtr); | |
389 | } | |
390 | ||
391 | public: | |
392 | MacroAssemblerCodeRef() | |
393 | { | |
394 | } | |
395 | ||
396 | MacroAssemblerCodeRef(PassRefPtr<ExecutableMemoryHandle> executableMemory) | |
397 | : m_codePtr(executableMemory->start()) | |
398 | , m_executableMemory(executableMemory) | |
399 | { | |
400 | ASSERT(m_executableMemory->isManaged()); | |
401 | ASSERT(m_executableMemory->start()); | |
402 | ASSERT(m_codePtr); | |
403 | } | |
404 | ||
405 | // Use this only when you know that the codePtr refers to code that is | |
406 | // already being kept alive through some other means. Typically this means | |
407 | // that codePtr is immortal. | |
408 | static MacroAssemblerCodeRef createSelfManagedCodeRef(MacroAssemblerCodePtr codePtr) | |
409 | { | |
410 | return MacroAssemblerCodeRef(codePtr); | |
411 | } | |
412 | ||
413 | // Helper for creating self-managed code refs from LLInt. | |
414 | static MacroAssemblerCodeRef createLLIntCodeRef(OpcodeID codeId) | |
415 | { | |
416 | return createSelfManagedCodeRef(MacroAssemblerCodePtr::createFromExecutableAddress(LLInt::getCodePtr(codeId))); | |
417 | } | |
418 | ||
419 | ExecutableMemoryHandle* executableMemory() const | |
420 | { | |
421 | return m_executableMemory.get(); | |
422 | } | |
423 | ||
424 | MacroAssemblerCodePtr code() const | |
425 | { | |
426 | return m_codePtr; | |
427 | } | |
428 | ||
429 | size_t size() const | |
430 | { | |
431 | if (!m_executableMemory) | |
432 | return 0; | |
433 | return m_executableMemory->sizeInBytes(); | |
434 | } | |
435 | ||
436 | bool tryToDisassemble(const char* prefix) const | |
437 | { | |
438 | return JSC::tryToDisassemble(m_codePtr, size(), prefix, WTF::dataFile()); | |
439 | } | |
440 | ||
441 | explicit operator bool() const { return !!m_codePtr; } | |
442 | ||
443 | void dump(PrintStream& out) const | |
444 | { | |
445 | m_codePtr.dumpWithName("CodeRef", out); | |
446 | } | |
447 | ||
448 | private: | |
449 | MacroAssemblerCodePtr m_codePtr; | |
450 | RefPtr<ExecutableMemoryHandle> m_executableMemory; | |
451 | }; | |
452 | ||
453 | } // namespace JSC | |
454 | ||
namespace WTF {

// Route WTF hash tables to the hash functor defined above for
// MacroAssemblerCodePtr keys.
template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::MacroAssemblerCodePtr> {
    typedef JSC::MacroAssemblerCodePtrHash Hash;
};

// MacroAssemblerCodePtr supplies its own empty/deleted sentinels
// (isEmptyValue()/isDeletedValue()), so the generic CustomHashTraits
// adapter provides the hash-table traits.
template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::MacroAssemblerCodePtr> : public CustomHashTraits<JSC::MacroAssemblerCodePtr> { };

} // namespace WTF
466 | ||
467 | #endif // MacroAssemblerCodeRef_h |