]> git.saurik.com Git - apple/objc4.git/blob - runtime/objc-block-trampolines.mm
objc4-551.1.tar.gz
[apple/objc4.git] / runtime / objc-block-trampolines.mm
1 /*
2 * Copyright (c) 2010 Apple Inc. All Rights Reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23 /***********************************************************************
* objc-block-trampolines.mm
25 * Author: b.bum
26 *
27 **********************************************************************/
28
29 /***********************************************************************
30 * Imports.
31 **********************************************************************/
32 #include "objc-private.h"
33 #include "runtime.h"
34
35 #include <Block.h>
36 #include <Block_private.h>
37 #include <mach/mach.h>
38
39 // symbols defined in assembly files
40 // Don't use the symbols directly; they're thumb-biased on some ARM archs.
// Generates a static inline accessor `tramp()` for the assembly
// symbol `_tramp`. Bit 0 is masked off because on Thumb ARM archs the
// symbol address carries the Thumb bit, which would break the
// page-alignment arithmetic done on these values.
#define TRAMP(tramp) \
    static inline uintptr_t tramp(void) { \
        extern void *_##tramp; \
        return ((uintptr_t)&_##tramp) & ~1UL; \
    }
// Scalar return (arguments in a1/a2)
TRAMP(a1a2_tramphead);   // trampoline header code
TRAMP(a1a2_firsttramp);  // first trampoline
TRAMP(a1a2_nexttramp);   // second trampoline
TRAMP(a1a2_trampend);    // after the last trampoline

// Struct return (stret; arguments shifted to a2/a3)
TRAMP(a2a3_tramphead);   // trampoline header code
TRAMP(a2a3_firsttramp);  // first trampoline
TRAMP(a2a3_nexttramp);   // second trampoline
TRAMP(a2a3_trampend);    // after the last trampoline
57
58 // argument mode identifier
// Selects which trampoline page set a block needs, based on how its
// return value is passed (in registers vs. via a hidden struct-return
// pointer). Also used as the index into headPagePairs[].
typedef enum {
    ReturnValueInRegisterArgumentMode,
    ReturnValueOnStackArgumentMode,

    ArgumentModeMax
} ArgumentMode;
65
// slot size is 8 bytes on both i386 and x86_64 (because the
// bytes-per-call-instruction count is > 4 for both)
#define SLOT_SIZE 8
68
// unsigned value, larger than the number of blocks that fit in the page pair
70 #define LAST_SLOT_MARKER 4241
71
// A page pair is one writable data page immediately followed by one
// executable code page of trampolines. Slot N of the code page finds
// its block pointer at the same offset N*slotSize in the data page.
// The header fields below overlay the slots that are shadowed by the
// trampoline header code and therefore can never hold blocks.
#define TRAMPOLINE_PAGE_PAIR_HEADER_SIZE (sizeof(uint32_t) + sizeof(struct _TrampolineBlockPagePair *) + sizeof(struct _TrampolineBlockPagePair *))
typedef struct _TrampolineBlockPagePair {
    struct _TrampolineBlockPagePair *nextPagePair;      // linked list of all page pairs
    struct _TrampolineBlockPagePair *nextAvailablePage; // linked list of pages with available slots

    uint32_t nextAvailable; // index of next available slot, 0 if no more available

    // Data: block pointers and free list.
    // Bytes parallel with trampoline header are the fields above, or unused.
    uint8_t blocks[ PAGE_SIZE - TRAMPOLINE_PAGE_PAIR_HEADER_SIZE ]
        __attribute__((unavailable)) /* always use _headerSize() */;

    // Code: trampoline header followed by trampolines.
    uint8_t trampolines[PAGE_SIZE];

    // Per-trampoline block data format:
    // initial value is 0 while page pair is filled sequentially (last slot is LAST_SLOT_MARKER to indicate end of page)
    // when filled, value is reference to Block_copy()d block
    // when empty, value is index of next available slot OR LAST_SLOT_MARKER

} TrampolineBlockPagePair;
93
// two sets of trampoline page pairs; one for stack returns and one for register returns
// Indexed by ArgumentMode. All access happens under _lock()
// (enforced via _assert_locked() in the helpers below).
static TrampolineBlockPagePair *headPagePairs[2];
96
97 #pragma mark Utility Functions
// Size in bytes of the trampoline header code: the distance from the
// start of the code page to the first usable trampoline.
static inline uint32_t _headerSize() {
    uint32_t size = (uint32_t)(a1a2_firsttramp() - a1a2_tramphead());

    // The stret page must be laid out identically to the scalar page.
    assert((uint32_t)(a2a3_firsttramp() - a2a3_tramphead()) == size);

    return size;
}
106
// Size in bytes of one trampoline slot, measured as the distance
// between two consecutive trampolines.
static inline uint32_t _slotSize() {
    uint32_t size = (uint32_t)(a1a2_nexttramp() - a1a2_firsttramp());

    // The stret page must use the same slot size.
    assert((uint32_t)(a2a3_nexttramp() - a2a3_firsttramp()) == size);

    return size;
}
115
// Returns true if the trampoline code is Thumb, i.e. bit 0 of the raw
// assembly symbol address is set. Callers use this to set the Thumb
// bit on the IMPs they hand out.
static inline bool trampolinesAreThumb(void) {
    extern void *_a1a2_firsttramp;
#ifndef NDEBUG
    // These symbols are referenced only by the asserts below.
    // Fixed from `#if !NDEBUG`: NDEBUG is conventionally defined with
    // no replacement token (`#define NDEBUG`), which makes
    // `#if !NDEBUG` a preprocessing error; `#ifndef` is always valid.
    extern void *_a1a2_nexttramp;
    extern void *_a2a3_firsttramp;
    extern void *_a2a3_nexttramp;
#endif

    // make sure thumb-edness of all trampolines match
    assert(((uintptr_t)&_a1a2_firsttramp) % 2 ==
           ((uintptr_t)&_a2a3_firsttramp) % 2);
    assert(((uintptr_t)&_a1a2_firsttramp) % 2 ==
           ((uintptr_t)&_a1a2_nexttramp) % 2);
    assert(((uintptr_t)&_a1a2_firsttramp) % 2 ==
           ((uintptr_t)&_a2a3_nexttramp) % 2);

    return ((uintptr_t)&_a1a2_firsttramp) % 2;
}
134
// Total number of slots in one code page (including the padding slots
// shadowed by the trampoline header).
static inline uint32_t _slotsPerPagePair() {
    return (uint32_t)(PAGE_SIZE / _slotSize());
}
140
// Number of leading slots that overlap the trampoline header code and
// therefore can never hold a block pointer.
static inline uint32_t _paddingSlotCount() {
    return _headerSize() / _slotSize();
}
147
// Address of the block-pointer word for slot `index`. The data page is
// the first page of the pair, so the payload lives at the same offset
// from the page-pair base as the trampoline does from the code page.
static inline id *_payloadAddressAtIndex(TrampolineBlockPagePair *pagePair, uint32_t index) {
    uintptr_t offset = (uintptr_t)_slotSize() * index;
    return (id *)((uintptr_t)pagePair + offset);
}
154
// Entry point of the trampoline in slot `index` of `pagePair`.
static inline IMP _trampolineAddressAtIndex(TrampolineBlockPagePair *pagePair, uint32_t index) {
    uintptr_t addr =
        (uintptr_t)&(pagePair->trampolines) + (uintptr_t)_slotSize() * index;

#if defined(__arm__)
    // Thumb code must be invoked with bit 0 of the address set.
    if (trampolinesAreThumb()) addr += 1;
#endif

    return (IMP)addr;
}
166
// Acquire the lock guarding headPagePairs and all page-pair contents.
// Under __OBJC2__ this piggybacks on the runtime's write lock;
// otherwise on the global class lock.
static inline void _lock() {
#if __OBJC2__
    rwlock_write(&runtimeLock);
#else
    mutex_lock(&classLock);
#endif
}
174
// Release the lock taken by _lock().
static inline void _unlock() {
#if __OBJC2__
    rwlock_unlock_write(&runtimeLock);
#else
    mutex_unlock(&classLock);
#endif
}
182
// Debug check that the current thread holds the lock from _lock().
// Called at the top of every function that touches headPagePairs.
static inline void _assert_locked() {
#if __OBJC2__
    rwlock_assert_writing(&runtimeLock);
#else
    mutex_assert_locked(&classLock);
#endif
}
190
191 #pragma mark Trampoline Management Functions
// Allocates a fresh trampoline page pair for `aMode`, links it into
// headPagePairs[aMode], and returns it; returns nil on VM failure.
// Layout: one writable data page immediately followed by a code page
// that is a vm_remap'd copy of the trampoline template page from the
// text segment.
static TrampolineBlockPagePair *_allocateTrampolinesAndData(ArgumentMode aMode) {
    _assert_locked();

    vm_address_t dataAddress;

    // make sure certain assumptions are met
    assert(sizeof(TrampolineBlockPagePair) == 2*PAGE_SIZE);
    assert(_slotSize() == 8);
    assert(_headerSize() >= TRAMPOLINE_PAGE_PAIR_HEADER_SIZE);
    assert((_headerSize() % _slotSize()) == 0);

    // Template pages must be page-aligned and exactly one page long.
    assert(a1a2_tramphead() % PAGE_SIZE == 0);
    assert(a1a2_tramphead() + PAGE_SIZE == a1a2_trampend());
    assert(a2a3_tramphead() % PAGE_SIZE == 0);
    assert(a2a3_tramphead() + PAGE_SIZE == a2a3_trampend());

    TrampolineBlockPagePair *headPagePair = headPagePairs[aMode];

    if (headPagePair) {
        // Callers only allocate when no existing page has free slots.
        assert(headPagePair->nextAvailablePage == nil);
    }

    int i;
    kern_return_t result = KERN_FAILURE;
    // Retry loop: between vm_deallocate() and vm_remap() below another
    // thread may grab the freed page, making the remap fail.
    for(i = 0; i < 5; i++) {
        // Reserve two contiguous pages, then give the second one back
        // so vm_remap() can place the trampoline code exactly there.
        result = vm_allocate(mach_task_self(), &dataAddress, PAGE_SIZE * 2,
                             TRUE | VM_MAKE_TAG(VM_MEMORY_FOUNDATION));
        if (result != KERN_SUCCESS) {
            mach_error("vm_allocate failed", result);
            return nil;
        }

        vm_address_t codeAddress = dataAddress + PAGE_SIZE;
        result = vm_deallocate(mach_task_self(), codeAddress, PAGE_SIZE);
        if (result != KERN_SUCCESS) {
            mach_error("vm_deallocate failed", result);
            return nil;
        }

        // Pick the template code page for this return mode.
        uintptr_t codePage;
        switch(aMode) {
            case ReturnValueInRegisterArgumentMode:
                codePage = a1a2_firsttramp() & ~(PAGE_MASK);
                break;
            case ReturnValueOnStackArgumentMode:
                codePage = a2a3_firsttramp() & ~(PAGE_MASK);
                break;
            default:
                _objc_fatal("unknown return mode %d", (int)aMode);
                break;
        }
        // Remap a copy of the template page to codeAddress
        // (copy=TRUE, i.e. copy-on-write semantics).
        vm_prot_t currentProtection, maxProtection;
        result = vm_remap(mach_task_self(), &codeAddress, PAGE_SIZE, 0, FALSE, mach_task_self(),
                          codePage, TRUE, &currentProtection, &maxProtection, VM_INHERIT_SHARE);
        if (result != KERN_SUCCESS) {
            // Remap failed; release the remaining data page and retry
            // the whole two-page allocation from scratch.
            result = vm_deallocate(mach_task_self(), dataAddress, PAGE_SIZE);
            if (result != KERN_SUCCESS) {
                mach_error("vm_deallocate for retry failed.", result);
                return nil;
            }
        } else
            break;
    }

    if (result != KERN_SUCCESS)
        return nil;

    // Initialize the data-page header. Slots start filling after the
    // padding shadowed by the trampoline header; the last slot holds
    // LAST_SLOT_MARKER so sequential filling knows where the page ends.
    TrampolineBlockPagePair *pagePair = (TrampolineBlockPagePair *) dataAddress;
    pagePair->nextAvailable = _paddingSlotCount();
    pagePair->nextPagePair = nil;
    pagePair->nextAvailablePage = nil;
    id *lastPageBlockPtr = _payloadAddressAtIndex(pagePair, _slotsPerPagePair() - 1);
    *lastPageBlockPtr = (id)(uintptr_t) LAST_SLOT_MARKER;

    if (headPagePair) {
        // Append to the all-pages list and publish as the page that
        // currently has available slots.
        TrampolineBlockPagePair *lastPage = headPagePair;
        while(lastPage->nextPagePair)
            lastPage = lastPage->nextPagePair;

        lastPage->nextPagePair = pagePair;
        headPagePairs[aMode]->nextAvailablePage = pagePair;
    } else {
        headPagePairs[aMode] = pagePair;
    }

    return pagePair;
}
279
// Returns a page pair with at least one free slot for `aMode`,
// allocating a new page pair if every existing one is full.
static TrampolineBlockPagePair *_getOrAllocatePagePairWithNextAvailable(ArgumentMode aMode) {
    _assert_locked();

    TrampolineBlockPagePair *head = headPagePairs[aMode];

    // No pages yet for this mode: allocate the first one.
    if (!head) return _allocateTrampolinesAndData(aMode);

    // Prefer filling the head page, then any page with a freed slot.
    if (head->nextAvailable) return head;
    if (head->nextAvailablePage) return head->nextAvailablePage;

    // Everything is full; grow the list.
    return _allocateTrampolinesAndData(aMode);
}
296
// Finds the page pair whose code page contains `anImp`, searching both
// argument modes. On success optionally reports the slot index and the
// head page pair of the matching mode. Returns nil if anImp is not a
// trampoline managed here.
static TrampolineBlockPagePair *_pagePairAndIndexContainingIMP(IMP anImp, uint32_t *outIndex, TrampolineBlockPagePair **outHeadPagePair) {
    _assert_locked();

    uintptr_t impValue = (uintptr_t) anImp;
    uint32_t i;

    for(i = 0; i < ArgumentModeMax; i++) {
        TrampolineBlockPagePair *pagePair = headPagePairs[i];

        while(pagePair) {
            uintptr_t startOfTrampolines = (uintptr_t) &(pagePair->trampolines);
            uintptr_t endOfTrampolines = startOfTrampolines + PAGE_SIZE;

            // endOfTrampolines is one past the last trampoline byte.
            // Fixed off-by-one: the previous `<=` also matched an
            // address at the start of the *next* mapping and would
            // have produced an out-of-range slot index.
            if ( (impValue >= startOfTrampolines) && (impValue < endOfTrampolines) ) {
                if (outIndex) {
                    *outIndex = (uint32_t) ((impValue - startOfTrampolines) / SLOT_SIZE);
                }
                if (outHeadPagePair) {
                    *outHeadPagePair = headPagePairs[i];
                }
                return pagePair;
            }

            pagePair = pagePair->nextPagePair;
        }
    }

    return nil;
}
326
// `block` must already have been copied
// Claims a free trampoline slot for `block` and returns that
// trampoline's entry point. Writes the block pointer into the slot's
// payload word and advances the page's free-list bookkeeping.
static IMP _imp_implementationWithBlockNoCopy(ArgumentMode aMode, id block)
{
    _assert_locked();

    TrampolineBlockPagePair *pagePair = _getOrAllocatePagePairWithNextAvailable(aMode);
    if (!headPagePairs[aMode])
        headPagePairs[aMode] = pagePair;

    uint32_t index = pagePair->nextAvailable;
    id *payloadAddress = _payloadAddressAtIndex(pagePair, index);
    assert((index < _slotsPerPagePair()) || (index == LAST_SLOT_MARKER));

    // The free slot's current payload encodes what comes next:
    // 0 = untouched tail of a sequentially-filled page,
    // LAST_SLOT_MARKER = this was the last free slot,
    // anything else = index of the next free slot.
    uint32_t nextAvailableIndex = (uint32_t) *((uintptr_t *) payloadAddress);
    if (nextAvailableIndex == 0)
        // first time through, slots are filled with zeros, fill sequentially
        pagePair->nextAvailable = index + 1;
    else if (nextAvailableIndex == LAST_SLOT_MARKER) {
        // last slot is filled with this as marker
        // page now full, remove from available page linked list
        pagePair->nextAvailable = 0;
        TrampolineBlockPagePair *iteratorPair = headPagePairs[aMode];
        while(iteratorPair && (iteratorPair->nextAvailablePage != pagePair))
            iteratorPair = iteratorPair->nextAvailablePage;
        if (iteratorPair) {
            iteratorPair->nextAvailablePage = pagePair->nextAvailablePage;
            pagePair->nextAvailablePage = nil;
        }
    } else {
        // empty slot at index contains pointer to next available index
        pagePair->nextAvailable = nextAvailableIndex;
    }

    // Publish the block and hand back the matching trampoline.
    *payloadAddress = block;
    IMP trampoline = _trampolineAddressAtIndex(pagePair, index);

    return trampoline;
}
365
// Chooses the trampoline set for `block`: blocks known (via their
// signature) to return a struct on the stack need the stret (a2a3)
// trampolines; everything else uses the register-return (a1a2) set.
static ArgumentMode _argumentModeForBlock(id block) {
    if (_Block_has_signature(block) && _Block_use_stret(block)) {
        return ReturnValueOnStackArgumentMode;
    }
    return ReturnValueInRegisterArgumentMode;
}
374
375 #pragma mark Public API
// Public API: returns an IMP that invokes `block` when called as a
// method. The block is copied to the heap; the caller balances with
// imp_removeBlock().
IMP imp_implementationWithBlock(id block)
{
    block = Block_copy(block);

    _lock();
    ArgumentMode mode = _argumentModeForBlock(block);
    IMP trampoline = _imp_implementationWithBlockNoCopy(mode, block);
    _unlock();

    return trampoline;
}
384
385
// Public API: returns the block backing the trampoline `anImp`, or nil
// if anImp is nil, not a trampoline, or its slot is currently free.
id imp_getBlock(IMP anImp) {
    if (!anImp) return nil;

    id block = nil;
    uint32_t index;

    _lock();

    TrampolineBlockPagePair *pagePair =
        _pagePairAndIndexContainingIMP(anImp, &index, nil);

    if (pagePair) {
        id candidate = *_payloadAddressAtIndex(pagePair, index);
        uintptr_t raw = (uintptr_t) candidate;

        // A slot holding LAST_SLOT_MARKER or a small integer is a
        // free-list link, not a block pointer.
        if (raw != (uintptr_t) LAST_SLOT_MARKER &&
            raw >= (uintptr_t) _slotsPerPagePair())
        {
            block = candidate;
        }
    }

    _unlock();

    return block;
}
417
// Public API: frees the trampoline slot behind `anImp` and releases
// the block it referenced. Returns NO if anImp is nil or not a
// trampoline created by imp_implementationWithBlock().
BOOL imp_removeBlock(IMP anImp) {
    TrampolineBlockPagePair *pagePair;
    TrampolineBlockPagePair *headPagePair;
    uint32_t index;

    if (!anImp) return NO;

    _lock();
    pagePair = _pagePairAndIndexContainingIMP(anImp, &index, &headPagePair);

    if (!pagePair) {
        _unlock();
        return NO;
    }

    id *payloadAddress = _payloadAddressAtIndex(pagePair, index);
    id block = *payloadAddress;
    // block is released below

    // Push the slot onto the page's free list: it stores the previous
    // head index (or LAST_SLOT_MARKER when the list was empty) and
    // becomes the new head.
    if (pagePair->nextAvailable) {
        *payloadAddress = (id) (uintptr_t) pagePair->nextAvailable;
        pagePair->nextAvailable = index;
    } else {
        *payloadAddress = (id) (uintptr_t) LAST_SLOT_MARKER; // nada after this one is used
        pagePair->nextAvailable = index;
    }

    // make sure this page is on available linked list
    TrampolineBlockPagePair *pagePairIterator = headPagePair;

    // see if pagePair is the next available page for any existing pages
    while(pagePairIterator->nextAvailablePage && (pagePairIterator->nextAvailablePage != pagePair))
        pagePairIterator = pagePairIterator->nextAvailablePage;

    if (! pagePairIterator->nextAvailablePage) { // if iteration stopped because nextAvail was nil
        // add to end of list.
        pagePairIterator->nextAvailablePage = pagePair;
        pagePair->nextAvailablePage = nil;
    }

    // NOTE(review): Block_release runs after _unlock — presumably to
    // avoid executing block destruction code under the runtime lock;
    // confirm before reordering.
    _unlock();
    Block_release(block);
    return YES;
}