]> git.saurik.com Git - apple/objc4.git/blob - runtime/objc-block-trampolines.m
objc4-493.9.tar.gz
[apple/objc4.git] / runtime / objc-block-trampolines.m
1 /*
2 * Copyright (c) 2010 Apple Inc. All Rights Reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23 /***********************************************************************
24 * objc-block-trampolines.m
25 * Author: b.bum
26 *
27 **********************************************************************/
28
29 /***********************************************************************
30 * Imports.
31 **********************************************************************/
32 #include "objc-private.h"
33 #include "runtime.h"
34
35 #include <Block.h>
36 #include <Block_private.h>
37 #include <mach/mach.h>
38
39 // symbols defined in assembly files
40 // Don't use the symbols directly; they're thumb-biased on some ARM archs.
// TRAMP(name) defines an inline accessor that returns the address of the
// assembly symbol _name with the low bit cleared. The low (Thumb) bit is
// stripped so that address arithmetic between trampoline labels is valid
// on ARM archs where the symbols are thumb-biased.
#define TRAMP(tramp) \
static inline uintptr_t tramp(void) { \
    extern void *_##tramp; \
    return ((uintptr_t)&_##tramp) & ~1UL; \
}
// Scalar return
TRAMP(a1a2_tramphead);   // trampoline header code
TRAMP(a1a2_firsttramp);  // first trampoline
TRAMP(a1a2_nexttramp);   // second trampoline
TRAMP(a1a2_trampend);    // after the last trampoline

// Struct return
TRAMP(a2a3_tramphead);
TRAMP(a2a3_firsttramp);
TRAMP(a2a3_nexttramp);
TRAMP(a2a3_trampend);
57
58 // argument mode identifier
// Argument mode identifier: selects which trampoline flavor (and which
// entry of headPagePairs) a block uses.
typedef enum {
    ReturnValueInRegisterArgumentMode,  // scalar return: a1a2 trampolines
    ReturnValueOnStackArgumentMode,     // struct (stret) return: a2a3 trampolines

    ArgumentModeMax                     // number of modes; used as loop bound
} ArgumentMode;
65
// Slot size is 8 bytes on both i386 and x86_64 (because the bytes-per-call
// instruction is > 4 on both, a 4-byte slot could not hold one trampoline).
#define SLOT_SIZE 8

// Unsigned sentinel marking the end of a page pair's free list.
// Any value larger than the number of blocks that fit in the page pair
// works; it only needs to be distinguishable from a valid slot index.
#define LAST_SLOT_MARKER 4241
71
// Size of the bookkeeping fields at the start of the data page
// (nextPagePair + nextAvailablePage + nextAvailable).
#define TRAMPOLINE_PAGE_PAIR_HEADER_SIZE (sizeof(uint32_t) + sizeof(struct _TrampolineBlockPagePair *) + sizeof(struct _TrampolineBlockPagePair *))
// A page pair is one writable data page immediately followed by one
// executable code page. Slot i of the data page holds the block pointer
// (or free-list entry) for trampoline i of the code page; the two are at
// the same offset, exactly PAGE_SIZE apart.
typedef struct _TrampolineBlockPagePair {
    struct _TrampolineBlockPagePair *nextPagePair; // linked list of all page pairs
    struct _TrampolineBlockPagePair *nextAvailablePage; // linked list of pages with available slots

    uint32_t nextAvailable; // index of next available slot, 0 if no more available

    // Data: block pointers and free list.
    // Bytes parallel with trampoline header are the fields above, or unused.
    uint8_t blocks[ PAGE_SIZE - TRAMPOLINE_PAGE_PAIR_HEADER_SIZE ]
               __attribute__((unavailable)) /* always use _headerSize() */;

    // Code: trampoline header followed by trampolines.
    uint8_t trampolines[PAGE_SIZE];

    // Per-trampoline block data format:
    // initial value is 0 while page pair is filled sequentially (last slot is LAST_SLOT_MARKER to indicate end of page)
    // when filled, value is reference to Block_copy()d block
    // when empty, value is index of next available slot OR LAST_SLOT_MARKER

} TrampolineBlockPagePair;
93
// Two lists of trampoline page pairs, indexed by ArgumentMode:
// one for register returns and one for stack (stret) returns.
// Access is protected by _lock()/_unlock().
static TrampolineBlockPagePair *headPagePairs[2];
96
97 #pragma mark Utility Functions
// Size in bytes of the trampoline header code at the top of each code
// page, measured as the distance from the page head to the first
// trampoline.
static inline uint32_t _headerSize() {
    uintptr_t scalarHeader = a1a2_firsttramp() - a1a2_tramphead();
    uintptr_t stretHeader  = a2a3_firsttramp() - a2a3_tramphead();

    // both trampoline flavors must lay out their headers identically
    assert(stretHeader == scalarHeader);

    return (uint32_t) scalarHeader;
}
106
// Size in bytes of one trampoline slot, measured as the distance between
// two consecutive trampolines.
static inline uint32_t _slotSize() {
    uintptr_t scalarSlot = a1a2_nexttramp() - a1a2_firsttramp();
    uintptr_t stretSlot  = a2a3_nexttramp() - a2a3_firsttramp();

    // both trampoline flavors must use the same slot stride
    assert(stretSlot == scalarSlot);

    return (uint32_t) scalarSlot;
}
115
// True if the trampoline code is Thumb (symbol addresses carry the low
// bit). All trampoline entry points must agree.
static inline bool trampolinesAreThumb(void) {
    extern void *_a1a2_firsttramp;
    extern void *_a1a2_nexttramp;
    extern void *_a2a3_firsttramp;
    extern void *_a2a3_nexttramp;

    uintptr_t thumbBit = ((uintptr_t)&_a1a2_firsttramp) % 2;

    // make sure thumb-edness of all trampolines match
    assert(((uintptr_t)&_a2a3_firsttramp) % 2 == thumbBit);
    assert(((uintptr_t)&_a1a2_nexttramp)  % 2 == thumbBit);
    assert(((uintptr_t)&_a2a3_nexttramp)  % 2 == thumbBit);

    return thumbBit;
}
132
133 static inline uint32_t _slotsPerPagePair() {
134 uint32_t slotSize = _slotSize();
135 uint32_t slotsPerPagePair = PAGE_SIZE / slotSize;
136 return slotsPerPagePair;
137 }
138
// Number of leading slots that overlap the trampoline header and are
// therefore unusable for block payloads.
static inline uint32_t _paddingSlotCount() {
    return _headerSize() / _slotSize();
}
145
146 static inline void **_payloadAddressAtIndex(TrampolineBlockPagePair *pagePair, uint32_t index) {
147 uint32_t slotSize = _slotSize();
148 uintptr_t baseAddress = (uintptr_t) pagePair;
149 uintptr_t payloadAddress = baseAddress + (slotSize * index);
150 return (void **)payloadAddress;
151 }
152
153 static inline IMP _trampolineAddressAtIndex(TrampolineBlockPagePair *pagePair, uint32_t index) {
154 uint32_t slotSize = _slotSize();
155 uintptr_t baseAddress = (uintptr_t) &(pagePair->trampolines);
156 uintptr_t trampolineAddress = baseAddress + (slotSize * index);
157
158 #if defined(__arm__)
159 if (trampolinesAreThumb()) trampolineAddress++;
160 #endif
161
162 return (IMP)trampolineAddress;
163 }
164
// Trampoline bookkeeping piggybacks on the runtime's global lock:
// the runtimeLock write lock under ObjC2, classLock otherwise.
static inline void _lock() {
#if __OBJC2__
    rwlock_write(&runtimeLock);
#else
    mutex_lock(&classLock);
#endif
}
172
// Release the lock taken by _lock().
static inline void _unlock() {
#if __OBJC2__
    rwlock_unlock_write(&runtimeLock);
#else
    mutex_unlock(&classLock);
#endif
}
180
// Debug check that the caller holds the lock taken by _lock().
static inline void _assert_locked() {
#if __OBJC2__
    rwlock_assert_writing(&runtimeLock);
#else
    mutex_assert_locked(&classLock);
#endif
}
188
189 #pragma mark Trampoline Management Functions
// Allocate a new trampoline page pair for `aMode` and link it into
// headPagePairs[aMode]. Layout: one writable data page (bookkeeping +
// block payloads) immediately followed by one code page remapped from
// the trampoline template in our own text segment, so that each
// trampoline can find its payload at a fixed PAGE_SIZE offset.
// Returns NULL if the VM operations fail.
static TrampolineBlockPagePair *_allocateTrampolinesAndData(ArgumentMode aMode) {
    _assert_locked();

    vm_address_t dataAddress;

    // make sure certain assumptions are met
    assert(PAGE_SIZE == 4096);
    assert(sizeof(TrampolineBlockPagePair) == 2*PAGE_SIZE);
    assert(_slotSize() == 8);
    assert(_headerSize() >= TRAMPOLINE_PAGE_PAIR_HEADER_SIZE);
    assert((_headerSize() % _slotSize()) == 0);

    // each trampoline template must fill exactly one page-aligned page
    assert(a1a2_tramphead() % PAGE_SIZE == 0);
    assert(a1a2_tramphead() + PAGE_SIZE == a1a2_trampend());
    assert(a2a3_tramphead() % PAGE_SIZE == 0);
    assert(a2a3_tramphead() + PAGE_SIZE == a2a3_trampend());

    TrampolineBlockPagePair *headPagePair = headPagePairs[aMode];

    if (headPagePair) {
        // we only allocate when no existing page has free slots
        assert(headPagePair->nextAvailablePage == NULL);
    }

    int i;
    kern_return_t result = KERN_FAILURE;
    for(i = 0; i < 5; i++) {
        // Grab two contiguous fresh pages, give the second one back, and
        // remap the trampoline code page into the hole. The retry loop
        // presumably guards against another thread grabbing the freed
        // page before vm_remap re-fills it — TODO confirm.
        result = vm_allocate(mach_task_self(), &dataAddress, PAGE_SIZE * 2, TRUE);
        if (result != KERN_SUCCESS) {
            mach_error("vm_allocate failed", result);
            return NULL;
        }

        vm_address_t codeAddress = dataAddress + PAGE_SIZE;
        result = vm_deallocate(mach_task_self(), codeAddress, PAGE_SIZE);
        if (result != KERN_SUCCESS) {
            mach_error("vm_deallocate failed", result);
            return NULL;
        }

        // pick the trampoline template page for this argument mode
        uintptr_t codePage;
        switch(aMode) {
            case ReturnValueInRegisterArgumentMode:
                codePage = a1a2_firsttramp() & ~(PAGE_MASK);
                break;
            case ReturnValueOnStackArgumentMode:
                codePage = a2a3_firsttramp() & ~(PAGE_MASK);
                break;
            default:
                _objc_fatal("unknown return mode %d", (int)aMode);
                break;
        }
        vm_prot_t currentProtection, maxProtection;
        result = vm_remap(mach_task_self(), &codeAddress, PAGE_SIZE, 0, FALSE, mach_task_self(),
                          codePage, TRUE, &currentProtection, &maxProtection, VM_INHERIT_SHARE);
        if (result != KERN_SUCCESS) {
            // remap failed: the code page was already deallocated above,
            // so only the data page is still ours — release it and retry
            result = vm_deallocate(mach_task_self(), dataAddress, PAGE_SIZE);
            if (result != KERN_SUCCESS) {
                mach_error("vm_deallocate for retry failed.", result);
                return NULL;
            }
        } else
            break;
    }

    if (result != KERN_SUCCESS)
        return NULL;

    // Initialize the data page: sequential fill starts just past the
    // slots that mirror the trampoline header, and the final slot is
    // pre-marked as the end of the free list.
    TrampolineBlockPagePair *pagePair = (TrampolineBlockPagePair *) dataAddress;
    pagePair->nextAvailable = _paddingSlotCount();
    pagePair->nextPagePair = NULL;
    pagePair->nextAvailablePage = NULL;
    void **lastPageBlockPtr = _payloadAddressAtIndex(pagePair, _slotsPerPagePair() - 1);
    *lastPageBlockPtr = (void*)(uintptr_t) LAST_SLOT_MARKER;

    if (headPagePair) {
        // append to the all-pages list; the new page is now the one with
        // available slots
        TrampolineBlockPagePair *lastPage = headPagePair;
        while(lastPage->nextPagePair)
            lastPage = lastPage->nextPagePair;

        lastPage->nextPagePair = pagePair;
        headPagePairs[aMode]->nextAvailablePage = pagePair;
    } else {
        headPagePairs[aMode] = pagePair;
    }

    return pagePair;
}
277
278 static TrampolineBlockPagePair *_getOrAllocatePagePairWithNextAvailable(ArgumentMode aMode) {
279 _assert_locked();
280
281 TrampolineBlockPagePair *headPagePair = headPagePairs[aMode];
282
283 if (!headPagePair)
284 return _allocateTrampolinesAndData(aMode);
285
286 if (headPagePair->nextAvailable) // make sure head page is filled first
287 return headPagePair;
288
289 if (headPagePair->nextAvailablePage) // check if there is a page w/a hole
290 return headPagePair->nextAvailablePage;
291
292 return _allocateTrampolinesAndData(aMode); // tack on a new one
293 }
294
295 static TrampolineBlockPagePair *_pagePairAndIndexContainingIMP(IMP anImp, uint32_t *outIndex, TrampolineBlockPagePair **outHeadPagePair) {
296 _assert_locked();
297
298 uintptr_t impValue = (uintptr_t) anImp;
299 uint32_t i;
300
301 for(i = 0; i < ArgumentModeMax; i++) {
302 TrampolineBlockPagePair *pagePair = headPagePairs[i];
303
304 while(pagePair) {
305 uintptr_t startOfTrampolines = (uintptr_t) &(pagePair->trampolines);
306 uintptr_t endOfTrampolines = ((uintptr_t) startOfTrampolines) + PAGE_SIZE;
307
308 if ( (impValue >=startOfTrampolines) && (impValue <= endOfTrampolines) ) {
309 if (outIndex) {
310 *outIndex = (uint32_t) ((impValue - startOfTrampolines) / SLOT_SIZE);
311 }
312 if (outHeadPagePair) {
313 *outHeadPagePair = headPagePairs[i];
314 }
315 return pagePair;
316 }
317
318 pagePair = pagePair->nextPagePair;
319 }
320 }
321
322 return NULL;
323 }
324
// `block` must already have been copied (this function stores the
// reference without copying again). Installs `block` in the next free
// slot of the appropriate page pair and returns the trampoline IMP that
// will invoke it.
static IMP _imp_implementationWithBlockNoCopy(ArgumentMode aMode, void *block)
{
    _assert_locked();

    TrampolineBlockPagePair *pagePair = _getOrAllocatePagePairWithNextAvailable(aMode);
    if (!headPagePairs[aMode])
        headPagePairs[aMode] = pagePair;  // defensive; allocation already sets this

    uint32_t index = pagePair->nextAvailable;
    void **payloadAddress = _payloadAddressAtIndex(pagePair, index);
    // sanity bound: 1024 is above any valid slot index
    // NOTE(review): slots per pair is PAGE_SIZE/8 = 512 — confirm 1024 is intentional slack
    assert((index < 1024) || (index == LAST_SLOT_MARKER));

    // Free-slot encoding (see TrampolineBlockPagePair):
    //   0                -> slot never used; keep filling sequentially
    //   LAST_SLOT_MARKER -> this was the final free slot; page is now full
    //   anything else    -> index of the next free slot (free list)
    uint32_t nextAvailableIndex = (uint32_t) *((uintptr_t *) payloadAddress);
    if (nextAvailableIndex == 0)
        // first time through, slots are filled with zeros, fill sequentially
        pagePair->nextAvailable = index + 1;
    else if (nextAvailableIndex == LAST_SLOT_MARKER) {
        // last slot is filled with this as marker
        // page now full, remove from available page linked list
        pagePair->nextAvailable = 0;
        TrampolineBlockPagePair *iteratorPair = headPagePairs[aMode];
        while(iteratorPair && (iteratorPair->nextAvailablePage != pagePair))
            iteratorPair = iteratorPair->nextAvailablePage;
        if (iteratorPair) {
            iteratorPair->nextAvailablePage = pagePair->nextAvailablePage;
            pagePair->nextAvailablePage = NULL;
        }
    } else {
        // empty slot at index contains pointer to next available index
        pagePair->nextAvailable = nextAvailableIndex;
    }

    // claim the slot, then hand back the matching trampoline
    *payloadAddress = block;
    IMP trampoline = _trampolineAddressAtIndex(pagePair, index);

    return trampoline;
}
363
364 static ArgumentMode _argumentModeForBlock(void *block) {
365 ArgumentMode aMode = ReturnValueInRegisterArgumentMode;
366
367 if (_Block_has_signature(block) && _Block_use_stret(block))
368 aMode = ReturnValueOnStackArgumentMode;
369
370 return aMode;
371 }
372
373 #pragma mark Public API
374 IMP imp_implementationWithBlock(void *block)
375 {
376 block = Block_copy(block);
377 _lock();
378 IMP returnIMP = _imp_implementationWithBlockNoCopy(_argumentModeForBlock(block), block);
379 _unlock();
380 return returnIMP;
381 }
382
383
384 void *imp_getBlock(IMP anImp) {
385 uint32_t index;
386 TrampolineBlockPagePair *pagePair;
387
388 if (!anImp) return NULL;
389
390 _lock();
391
392 pagePair = _pagePairAndIndexContainingIMP(anImp, &index, NULL);
393
394 if (!pagePair) {
395 _unlock();
396 return NULL;
397 }
398
399 void *potentialBlock = *_payloadAddressAtIndex(pagePair, index);
400
401 if ((uintptr_t) potentialBlock == (uintptr_t) LAST_SLOT_MARKER) {
402 _unlock();
403 return NULL;
404 }
405
406 if ((uintptr_t) potentialBlock < (uintptr_t) _slotsPerPagePair()) {
407 _unlock();
408 return NULL;
409 }
410
411 _unlock();
412
413 return potentialBlock;
414 }
415
// Return a trampoline's slot to its page's free list and release the
// block it referenced. Returns NO if anImp is NULL or not one of our
// trampolines; YES after the block has been Block_release()d.
BOOL imp_removeBlock(IMP anImp) {
    TrampolineBlockPagePair *pagePair;
    TrampolineBlockPagePair *headPagePair;
    uint32_t index;

    if (!anImp) return NO;

    _lock();
    pagePair = _pagePairAndIndexContainingIMP(anImp, &index, &headPagePair);

    if (!pagePair) {
        _unlock();
        return NO;
    }

    void **payloadAddress = _payloadAddressAtIndex(pagePair, index);
    void *block = *payloadAddress;
    // block is released below, after the lock is dropped

    // Push this slot onto the page's free list: the freed slot stores
    // the previous nextAvailable (or LAST_SLOT_MARKER when there was
    // none), and nextAvailable now points at this slot.
    if (pagePair->nextAvailable) {
        *payloadAddress = (void *) (uintptr_t) pagePair->nextAvailable;
        pagePair->nextAvailable = index;
    } else {
        *payloadAddress = (void *) (uintptr_t) LAST_SLOT_MARKER; // nada after this one is used
        pagePair->nextAvailable = index;
    }

    // make sure this page is on available linked list
    TrampolineBlockPagePair *pagePairIterator = headPagePair;

    // see if pagePair is the next available page for any existing pages
    while(pagePairIterator->nextAvailablePage && (pagePairIterator->nextAvailablePage != pagePair))
        pagePairIterator = pagePairIterator->nextAvailablePage;

    if (! pagePairIterator->nextAvailablePage) { // if iteration stopped because nextAvail was NULL
        // add to end of list.
        // (When pagePair is the head page itself these two stores cancel
        // out and the list is unchanged; its free slot is still found
        // because allocation checks head->nextAvailable directly.)
        pagePairIterator->nextAvailablePage = pagePair;
        pagePair->nextAvailablePage = NULL;
    }

    _unlock();
    Block_release(block);
    return YES;
}