2 * Copyright (c) 2010-2012 Apple Inc. All rights reserved.
4 * @APPLE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
21 * @APPLE_LICENSE_HEADER_END@
24 #include "objc-weak.h"
25 #include "objc-private.h"
26 #include "objc-internal.h"
29 #include "objc-runtime-new.h"
32 #include "llvm-DenseMap.h"
34 #include <malloc/malloc.h>
37 #include <mach/mach.h>
38 #include <mach-o/dyld.h>
39 #include <mach-o/nlist.h>
40 #include <sys/types.h>
42 #include <libkern/OSAtomic.h>
47 @interface NSInvocation
51 // better to not rely on Foundation to build
53 @class NSMethodSignature;
55 typedef unsigned long NSUInteger;
57 typedef unsigned int NSUInteger;
59 typedef struct _NSZone NSZone;
63 - (BOOL)isEqual:(id)object;
71 - (id)performSelector:(SEL)aSelector;
72 - (id)performSelector:(SEL)aSelector withObject:(id)object;
73 - (id)performSelector:(SEL)aSelector withObject:(id)object1 withObject:(id)object2;
77 - (BOOL)isKindOfClass:(Class)aClass;
78 - (BOOL)isMemberOfClass:(Class)aClass;
79 - (BOOL)conformsToProtocol:(Protocol *)aProtocol;
81 - (BOOL)respondsToSelector:(SEL)aSelector;
84 - (oneway void)release;
86 - (NSUInteger)retainCount;
88 - (NSString *)description;
89 - (NSString *)debugDescription;
94 @interface NSObject <NSObject>
100 // HACK -- the use of these functions must be after the @implementation
101 id bypass_msgSend_retain(NSObject *obj) asm("-[NSObject retain]");
102 void bypass_msgSend_release(NSObject *obj) asm("-[NSObject release]");
103 id bypass_msgSend_autorelease(NSObject *obj) asm("-[NSObject autorelease]");
108 // NSObject used to be in Foundation/CoreFoundation.
110 #define SYMBOL_ELSEWHERE_IN_3(sym, vers, n) \
111 OBJC_EXPORT const char elsewhere_ ##n __asm__("$ld$hide$os" #vers "$" #sym); const char elsewhere_ ##n = 0
112 #define SYMBOL_ELSEWHERE_IN_2(sym, vers, n) \
113 SYMBOL_ELSEWHERE_IN_3(sym, vers, n)
114 #define SYMBOL_ELSEWHERE_IN(sym, vers) \
115 SYMBOL_ELSEWHERE_IN_2(sym, vers, __COUNTER__)
118 # define NSOBJECT_ELSEWHERE_IN(vers) \
119 SYMBOL_ELSEWHERE_IN(_OBJC_CLASS_$_NSObject, vers); \
120 SYMBOL_ELSEWHERE_IN(_OBJC_METACLASS_$_NSObject, vers); \
121 SYMBOL_ELSEWHERE_IN(_OBJC_IVAR_$_NSObject.isa, vers)
123 # define NSOBJECT_ELSEWHERE_IN(vers) \
124 SYMBOL_ELSEWHERE_IN(.objc_class_name_NSObject, vers)
128 NSOBJECT_ELSEWHERE_IN(5.1);
129 NSOBJECT_ELSEWHERE_IN(5.0);
130 NSOBJECT_ELSEWHERE_IN(4.3);
131 NSOBJECT_ELSEWHERE_IN(4.2);
132 NSOBJECT_ELSEWHERE_IN(4.1);
133 NSOBJECT_ELSEWHERE_IN(4.0);
134 NSOBJECT_ELSEWHERE_IN(3.2);
135 NSOBJECT_ELSEWHERE_IN(3.1);
136 NSOBJECT_ELSEWHERE_IN(3.0);
137 NSOBJECT_ELSEWHERE_IN(2.2);
138 NSOBJECT_ELSEWHERE_IN(2.1);
139 NSOBJECT_ELSEWHERE_IN(2.0);
141 NSOBJECT_ELSEWHERE_IN(10.7);
142 NSOBJECT_ELSEWHERE_IN(10.6);
143 NSOBJECT_ELSEWHERE_IN(10.5);
144 NSOBJECT_ELSEWHERE_IN(10.4);
145 NSOBJECT_ELSEWHERE_IN(10.3);
146 NSOBJECT_ELSEWHERE_IN(10.2);
147 NSOBJECT_ELSEWHERE_IN(10.1);
148 NSOBJECT_ELSEWHERE_IN(10.0);
154 #if SUPPORT_RETURN_AUTORELEASE
155 // We cannot peek at where we are returning to unless we always inline this:
156 __attribute__((always_inline))
157 static bool callerAcceptsFastAutorelease(const void * const ra0);
161 /***********************************************************************
163 **********************************************************************/
165 static bool seen_weak_refs;
167 static id defaultBadAllocHandler(Class cls)
169 _objc_fatal("attempt to allocate object of class '%s' failed",
173 static id(*badAllocHandler)(Class) = &defaultBadAllocHandler;
175 static id callBadAllocHandler(Class cls)
177 // fixme add re-entrancy protection in case allocation fails inside handler
178 return (*badAllocHandler)(cls);
181 void _objc_setBadAllocHandler(id(*newHandler)(Class))
183 badAllocHandler = newHandler;
187 #define ARR_LOGGING 0
195 } CompilerGenerated, ExplicitlyCoded;
197 void (^objc_arr_log)(const char *, id param) =
198 ^(const char *str, id param) { printf("%s %p\n", str, param); };
204 #if TARGET_OS_EMBEDDED
205 # define SIDE_TABLE_STRIPE 1
207 # define SIDE_TABLE_STRIPE 8
210 // should be a multiple of cache line size (64)
211 #define SIDE_TABLE_SIZE 64
213 typedef objc::DenseMap<id,size_t,true> RefcountMap;
217 static uint8_t table_buf[SIDE_TABLE_STRIPE * SIDE_TABLE_SIZE];
222 weak_table_t weak_table;
224 SideTable() : slock(OS_SPINLOCK_INIT)
226 memset(&weak_table, 0, sizeof(weak_table));
231 // never delete side_table in case other threads retain during exit
235 static SideTable *tableForPointer(const void *p)
237 # if SIDE_TABLE_STRIPE == 1
238 return (SideTable *)table_buf;
240 uintptr_t a = (uintptr_t)p;
241 int index = ((a >> 4) ^ (a >> 9)) & (SIDE_TABLE_STRIPE - 1);
242 return (SideTable *)&table_buf[index * SIDE_TABLE_SIZE];
247 // use placement new instead of static ctor to avoid dtor at exit
248 for (int i = 0; i < SIDE_TABLE_STRIPE; i++) {
249 new (&table_buf[i * SIDE_TABLE_SIZE]) SideTable;
253 static bool noLocksHeld(void) {
255 for (int i = 0; i < SIDE_TABLE_STRIPE && gotAll; i++) {
256 SideTable *s = (SideTable *)(&table_buf[i * SIDE_TABLE_SIZE]);
257 if (OSSpinLockTry(&s->slock)) {
258 OSSpinLockUnlock(&s->slock);
267 STATIC_ASSERT(sizeof(SideTable) <= SIDE_TABLE_SIZE);
268 __attribute__((aligned(SIDE_TABLE_SIZE))) uint8_t
269 SideTable::table_buf[SIDE_TABLE_STRIPE * SIDE_TABLE_SIZE];
271 // Avoid false-negative reports from tools like "leaks"
272 #define DISGUISE(x) ((id)~(uintptr_t)(x))
274 // anonymous namespace
277 bool noSideTableLocksHeld(void)
279 return SideTable::noLocksHeld();
283 // The -fobjc-arc flag causes the compiler to issue calls to objc_{retain/release/autorelease/retain_block}
286 id objc_retainBlock(id x) {
288 objc_arr_log("objc_retain_block", x);
289 ++CompilerGenerated.blockCopies;
291 return (id)_Block_copy(x);
295 // The following SHOULD be called by the compiler directly, but the request hasn't been made yet :-)
298 BOOL objc_should_deallocate(id object) {
id
objc_retain_autorelease(id obj)
{
    // Equivalent to [[obj retain] autorelease]: take a +1 reference and
    // immediately hand it to the current autorelease pool.
    id retained = objc_retain(obj);
    return objc_autorelease(retained);
}
// Store newObj into the __weak variable at *location, moving the location's
// registration from the old referent's weak table to the new referent's.
// NOTE(review): this listing is missing interleaved lines (local declarations,
// the retry label, #else/#endif pairs, and the return) -- comments below
// describe only the logic that is visible here.
309 objc_storeWeak(id *location, id newObj)
315 #if SIDE_TABLE_STRIPE > 1
// Record that at least one weak reference has ever been stored; other code
// (see objc_clear_deallocating) checks seen_weak_refs to skip weak-table
// cleanup when weak references were never used.
319 if (!seen_weak_refs) {
320 seen_weak_refs = true;
323 // Acquire locks for old and new values.
324 // Order by lock address to prevent lock ordering problems.
325 // Retry if the old value changes underneath us.
// Old and new referents may hash to different side-table stripes.
329 oldTable = SideTable::tableForPointer(oldObj);
330 newTable = SideTable::tableForPointer(newObj);
332 lock1 = &newTable->slock;
333 #if SIDE_TABLE_STRIPE > 1
334 lock2 = &oldTable->slock;
// Presumably lock1/lock2 are swapped here so locks are always taken in a
// consistent (address) order -- the swap body is not visible; confirm.
336 OSSpinLock *temp = lock1;
340 if (lock1 != lock2) OSSpinLockLock(lock2);
342 OSSpinLockLock(lock1);
// Another thread changed the weak variable while we were acquiring locks:
// drop both locks and (per the comment above) retry with the new old-value.
344 if (*location != oldObj) {
345 OSSpinLockUnlock(lock1);
346 #if SIDE_TABLE_STRIPE > 1
347 if (lock1 != lock2) OSSpinLockUnlock(lock2);
// Unhook this location from the old referent's weak table...
353 weak_unregister_no_lock(&oldTable->weak_table, oldObj, location);
// ...and register it with the new referent's weak table.
356 newObj = weak_register_no_lock(&newTable->weak_table, newObj,location);
357 // weak_register_no_lock returns NULL if weak store should be rejected
359 // Do not set *location anywhere else. That would introduce a race.
362 OSSpinLockUnlock(lock1);
363 #if SIDE_TABLE_STRIPE > 1
364 if (lock1 != lock2) OSSpinLockUnlock(lock2);
// Load the object referenced by a __weak variable and return it retained
// (+1) for the caller, or NULL if the referent is nil/deallocating.
// NOTE(review): lines declaring the locals and the initial unsafe read of
// *location into `result` are missing from this listing.
371 objc_loadWeakRetained(id *location)
// Fast path: a nil weak variable never needs the side table.
380 if (!result) return NULL;
// Lock the stripe for the referent we observed.
382 table = SideTable::tableForPointer(result);
383 lock = &table->slock;
385 OSSpinLockLock(lock);
// The variable changed between the unlocked read and taking the lock;
// presumably the missing lines retry -- confirm against full source.
386 if (*location != result) {
387 OSSpinLockUnlock(lock);
// Re-read under the lock; arr_read_weak_reference also handles the
// referent-is-deallocating case and performs the retain.
391 result = arr_read_weak_reference(&table->weak_table, location);
393 OSSpinLockUnlock(lock);
id
objc_loadWeak(id *location)
{
    // Read the weak variable with a +1 retain, then balance that retain by
    // handing the result to the autorelease pool for the caller.
    id value = objc_loadWeakRetained(location);
    return objc_autorelease(value);
}
// Initialize a fresh (uninitialized) __weak variable to val.
// NOTE(review): the lines between the signature and this return are missing
// from the listing -- presumably they zero *addr and short-circuit for a nil
// val before registering; confirm against the full source.
404 objc_initWeak(id *addr, id val)
407 return objc_storeWeak(addr, val);
void
objc_destroyWeak(id *addr)
{
    // Tearing down a __weak variable is just storing nil into it: that
    // unregisters the location from the referent's weak table.
    objc_storeWeak(addr, 0);
}
417 objc_copyWeak(id *to, id *from)
419 id val = objc_loadWeakRetained(from);
420 objc_initWeak(to, val);
void
objc_moveWeak(id *to, id *from)
{
    // A weak "move" is implemented as copy-then-destroy: duplicate the
    // registration at `to`, then clear `from` back to nil.
    objc_copyWeak(to, from);
    objc_destroyWeak(from);
}
432 /* Autorelease pool implementation
433 A thread's autorelease pool is a stack of pointers.
434 Each pointer is either an object to release, or POOL_SENTINEL which is
435 an autorelease pool boundary.
436 A pool token is a pointer to the POOL_SENTINEL for that pool. When
437 the pool is popped, every object hotter than the sentinel is released.
438 The stack is divided into a doubly-linked list of pages. Pages are added
439 and deleted as necessary.
440 Thread-local storage points to the hot page, where newly autoreleased
444 extern "C" BREAKPOINT_FUNCTION(void objc_autoreleaseNoPool(id obj));
449 static const uint32_t M0 = 0xA1A1A1A1;
450 # define M1 "AUTORELEASE!"
451 static const size_t M1_len = 12;
455 assert(M1_len == strlen(M1));
456 assert(M1_len == 3 * sizeof(m[1]));
459 strncpy((char *)&m[1], M1, M1_len);
463 m[0] = m[1] = m[2] = m[3] = 0;
467 return (m[0] == M0 && 0 == strncmp((char *)&m[1], M1, M1_len));
470 bool fastcheck() const {
482 // Set this to 1 to mprotect() autorelease pool contents
483 #define PROTECT_AUTORELEASEPOOL 0
485 class AutoreleasePoolPage
488 #define POOL_SENTINEL 0
489 static pthread_key_t const key = AUTORELEASE_POOL_KEY;
490 static uint8_t const SCRIBBLE = 0xA3; // 0xA3A3A3A3 after releasing
491 static size_t const SIZE =
492 #if PROTECT_AUTORELEASEPOOL
493 4096; // must be multiple of vm page size
495 4096; // size and alignment, power of 2
497 static size_t const COUNT = SIZE / sizeof(id);
501 pthread_t const thread;
502 AutoreleasePoolPage * const parent;
503 AutoreleasePoolPage *child;
504 uint32_t const depth;
507 // SIZE-sizeof(*this) bytes of contents follow
509 static void * operator new(size_t size) {
510 return malloc_zone_memalign(malloc_default_zone(), SIZE, SIZE);
512 static void operator delete(void * p) {
516 inline void protect() {
517 #if PROTECT_AUTORELEASEPOOL
518 mprotect(this, SIZE, PROT_READ);
523 inline void unprotect() {
524 #if PROTECT_AUTORELEASEPOOL
526 mprotect(this, SIZE, PROT_READ | PROT_WRITE);
530 AutoreleasePoolPage(AutoreleasePoolPage *newParent)
531 : magic(), next(begin()), thread(pthread_self()),
532 parent(newParent), child(NULL),
533 depth(parent ? 1+parent->depth : 0),
534 hiwat(parent ? parent->hiwat : 0)
538 assert(!parent->child);
540 parent->child = this;
546 ~AutoreleasePoolPage()
552 // Not recursive: we don't want to blow out the stack
553 // if a thread accumulates a stupendous amount of garbage
558 void busted(bool die = true)
560 (die ? _objc_fatal : _objc_inform)
561 ("autorelease pool page %p corrupted\n"
562 " magic 0x%08x 0x%08x 0x%08x 0x%08x\n pthread %p\n",
563 this, magic.m[0], magic.m[1], magic.m[2], magic.m[3],
567 void check(bool die = true)
569 if (!magic.check() || !pthread_equal(thread, pthread_self())) {
574 void fastcheck(bool die = true)
576 if (! magic.fastcheck()) {
583 return (id *) ((uint8_t *)this+sizeof(*this));
587 return (id *) ((uint8_t *)this+SIZE);
591 return next == begin();
595 return next == end();
598 bool lessThanHalfFull() {
599 return (next - begin() < (end() - begin()) / 2);
613 releaseUntil(begin());
616 void releaseUntil(id *stop)
618 // Not recursive: we don't want to blow out the stack
619 // if a thread accumulates a stupendous amount of garbage
621 while (this->next != stop) {
622 // Restart from hotPage() every time, in case -release
623 // autoreleased more objects
624 AutoreleasePoolPage *page = hotPage();
626 // fixme I think this `while` can be `if`, but I can't prove it
627 while (page->empty()) {
633 id obj = *--page->next;
634 memset((void*)page->next, SCRIBBLE, sizeof(*page->next));
637 if (obj != POOL_SENTINEL) {
645 // we expect any children to be completely empty
646 for (AutoreleasePoolPage *page = child; page; page = page->child) {
647 assert(page->empty());
654 // Not recursive: we don't want to blow out the stack
655 // if a thread accumulates a stupendous amount of garbage
656 AutoreleasePoolPage *page = this;
657 while (page->child) page = page->child;
659 AutoreleasePoolPage *deathptr;
669 } while (deathptr != this);
672 static void tls_dealloc(void *p)
674 // reinstate TLS value while we work
675 setHotPage((AutoreleasePoolPage *)p);
680 static AutoreleasePoolPage *pageForPointer(const void *p)
682 return pageForPointer((uintptr_t)p);
685 static AutoreleasePoolPage *pageForPointer(uintptr_t p)
687 AutoreleasePoolPage *result;
688 uintptr_t offset = p % SIZE;
690 assert(offset >= sizeof(AutoreleasePoolPage));
692 result = (AutoreleasePoolPage *)(p - offset);
699 static inline AutoreleasePoolPage *hotPage()
701 AutoreleasePoolPage *result = (AutoreleasePoolPage *)
703 if (result) result->fastcheck();
707 static inline void setHotPage(AutoreleasePoolPage *page)
709 if (page) page->fastcheck();
710 tls_set_direct(key, (void *)page);
713 static inline AutoreleasePoolPage *coldPage()
715 AutoreleasePoolPage *result = hotPage();
717 while (result->parent) {
718 result = result->parent;
// Fast path for adding obj to the current thread's autorelease pool:
// if the hot page exists and has room, push directly onto it; otherwise
// fall through to the slow path (new page / no-pool handling).
726 static inline id *autoreleaseFast(id obj)
728 AutoreleasePoolPage *page = hotPage();
729 if (page && !page->full()) {
730 return page->add(obj);
// Slow path handles: no pool in place, or hot page full.
732 return autoreleaseSlow(obj);
// Slow path of autorelease: called only when there is no hot page or the
// hot page is full. Either warns about a missing pool (leaking the
// autorelease) or walks/extends the page list until a non-full page exists.
// NOTE(review): the lines assigning `page = hotPage()`, the no-pool branch
// header, the `do {` loop header, and setHotPage() are missing from this
// listing; comments describe only the visible logic.
736 static __attribute__((noinline))
737 id *autoreleaseSlow(id obj)
739 AutoreleasePoolPage *page;
742 // The code below assumes some cases are handled by autoreleaseFast()
743 assert(!page || page->full());
// No pool in place: a pool sentinel should never reach this path.
746 assert(obj != POOL_SENTINEL);
747 _objc_inform("Object %p of class %s autoreleased "
748 "with no pool in place - just leaking - "
749 "break on objc_autoreleaseNoPool() to debug",
750 obj, object_getClassName(obj));
// Breakpoint hook for debugging autorelease-without-pool.
751 objc_autoreleaseNoPool(obj);
// Advance to an existing child page, or grow the list with a new page,
// until a page with room is found.
756 if (page->child) page = page->child;
757 else page = new AutoreleasePoolPage(page);
758 } while (page->full());
761 return page->add(obj);
765 static inline id autorelease(id obj)
768 assert(!OBJC_IS_TAGGED_PTR(obj));
769 id *dest __unused = autoreleaseFast(obj);
770 assert(!dest || *dest == obj);
775 static inline void *push()
778 setHotPage(new AutoreleasePoolPage(NULL));
780 id *dest = autoreleaseFast(POOL_SENTINEL);
781 assert(*dest == POOL_SENTINEL);
785 static inline void pop(void *token)
787 AutoreleasePoolPage *page;
791 page = pageForPointer(token);
793 assert(*stop == POOL_SENTINEL);
795 // Token 0 is top-level pool
798 stop = page->begin();
801 if (PrintPoolHiwat) printHiwat();
803 page->releaseUntil(stop);
805 // memory: delete empty children
806 // hysteresis: keep one empty child if this page is more than half full
807 // special case: delete everything for pop(0)
811 } else if (page->child) {
812 if (page->lessThanHalfFull()) {
815 else if (page->child->child) {
816 page->child->child->kill();
823 int r __unused = pthread_key_init_np(AutoreleasePoolPage::key,
824 AutoreleasePoolPage::tls_dealloc);
830 _objc_inform("[%p] ................ PAGE %s %s %s", this,
831 full() ? "(full)" : "",
832 this == hotPage() ? "(hot)" : "",
833 this == coldPage() ? "(cold)" : "");
835 for (id *p = begin(); p < next; p++) {
836 if (*p == POOL_SENTINEL) {
837 _objc_inform("[%p] ################ POOL %p", p, p);
839 _objc_inform("[%p] %#16lx %s",
840 p, (unsigned long)*p, object_getClassName(*p));
845 static void printAll()
847 _objc_inform("##############");
848 _objc_inform("AUTORELEASE POOLS for thread %p", pthread_self());
850 AutoreleasePoolPage *page;
851 ptrdiff_t objects = 0;
852 for (page = coldPage(); page; page = page->child) {
853 objects += page->next - page->begin();
855 _objc_inform("%llu releases pending.", (unsigned long long)objects);
857 for (page = coldPage(); page; page = page->child) {
861 _objc_inform("##############");
864 static void printHiwat()
866 // Check and propagate high water mark
867 // Ignore high water marks under 256 to suppress noise.
868 AutoreleasePoolPage *p = hotPage();
869 uint32_t mark = p->depth*COUNT + (uint32_t)(p->next - p->begin());
870 if (mark > p->hiwat && mark > 256) {
871 for( ; p; p = p->parent) {
877 _objc_inform("POOL HIGHWATER: new high water mark of %u "
878 "pending autoreleases for thread %p:",
879 mark, pthread_self());
882 int count = backtrace(stack, sizeof(stack)/sizeof(stack[0]));
883 char **sym = backtrace_symbols(stack, count);
884 for (int i = 0; i < count; i++) {
885 _objc_inform("POOL HIGHWATER: %s", sym[i]);
894 // anonymous namespace
897 // API to only be called by root classes like NSObject or NSProxy
900 __attribute__((used,noinline,nothrow))
901 static id _objc_rootRetain_slow(id obj);
902 __attribute__((used,noinline,nothrow))
903 static bool _objc_rootReleaseWasZero_slow(id obj);
// Out-of-line retain path: taken when the side-table spinlock could not be
// acquired with OSSpinLockTry in the inlined fast path (see -retain).
// The per-object refcount in table->refcnts is kept shifted left by one:
// bit 0 is used as the "deallocating" flag elsewhere (see the `& 1` tests
// in _objc_rootTryRetain / _objc_rootIsDeallocating), so adding 2 here
// adds exactly one retain without disturbing that flag.
907 _objc_rootRetain_slow(id obj)
909 SideTable *table = SideTable::tableForPointer(obj);
910 OSSpinLockLock(&table->slock);
911 table->refcnts[DISGUISE(obj)] += 2;
912 OSSpinLockUnlock(&table->slock);
918 _objc_rootTryRetain(id obj)
923 if (OBJC_IS_TAGGED_PTR(obj)) return true;
925 SideTable *table = SideTable::tableForPointer(obj);
928 // _objc_rootTryRetain() is called exclusively by _objc_loadWeak(),
929 // which already acquired the lock on our behalf.
930 if (table->slock == 0) {
931 _objc_fatal("Do not call -_tryRetain.");
935 RefcountMap::iterator it = table->refcnts.find(DISGUISE(obj));
936 if (it == table->refcnts.end()) {
937 table->refcnts[DISGUISE(obj)] = 2;
938 } else if (it->second & 1) {
948 _objc_rootIsDeallocating(id obj)
953 if (OBJC_IS_TAGGED_PTR(obj)) return false;
955 SideTable *table = SideTable::tableForPointer(obj);
958 // _objc_rootIsDeallocating() is called exclusively by _objc_storeWeak(),
959 // which already acquired the lock on our behalf.
960 if (table->slock == 0) {
961 _objc_fatal("Do not call -_isDeallocating.");
964 RefcountMap::iterator it = table->refcnts.find(DISGUISE(obj));
965 return (it != table->refcnts.end()) && ((it->second & 1) == 1);
970 objc_clear_deallocating(id obj)
975 SideTable *table = SideTable::tableForPointer(obj);
977 // clear any weak table items
978 // clear extra retain count and deallocating bit
979 // (fixme warn or abort if extra retain count == 0 ?)
980 OSSpinLockLock(&table->slock);
981 if (seen_weak_refs) {
982 arr_clear_deallocating(&table->weak_table, obj);
984 table->refcnts.erase(DISGUISE(obj));
985 OSSpinLockUnlock(&table->slock);
// Out-of-line release path: taken when the inlined fast path (see
// _objc_rootReleaseWasZero) failed to acquire the spinlock with
// OSSpinLockTry. Decrements the side-table refcount and reports whether
// the object should now be deallocated.
// NOTE(review): lines setting do_dealloc = true, the decrement branch, and
// the final `return do_dealloc;` are missing from this listing.
990 _objc_rootReleaseWasZero_slow(id obj)
992 SideTable *table = SideTable::tableForPointer(obj);
994 bool do_dealloc = false;
996 OSSpinLockLock(&table->slock);
997 RefcountMap::iterator it = table->refcnts.find(DISGUISE(obj));
// No entry means the (implicit) retain count was already at its minimum:
// mark the object deallocating by storing 1 (bit 0 = deallocating flag,
// matching the `& 1` tests elsewhere in this file).
998 if (it == table->refcnts.end()) {
1000 table->refcnts[DISGUISE(obj)] = 1;
// An existing entry of 0 likewise means release hit zero; the missing
// lines presumably set the flag and do_dealloc -- confirm.
1001 } else if (it->second == 0) {
1007 OSSpinLockUnlock(&table->slock);
1012 _objc_rootReleaseWasZero(id obj)
1017 if (OBJC_IS_TAGGED_PTR(obj)) return false;
1019 SideTable *table = SideTable::tableForPointer(obj);
1021 bool do_dealloc = false;
1023 if (OSSpinLockTry(&table->slock)) {
1024 RefcountMap::iterator it = table->refcnts.find(DISGUISE(obj));
1025 if (it == table->refcnts.end()) {
1027 table->refcnts[DISGUISE(obj)] = 1;
1028 } else if (it->second == 0) {
1034 OSSpinLockUnlock(&table->slock);
1037 return _objc_rootReleaseWasZero_slow(obj);
1040 __attribute__((noinline,used))
1041 static id _objc_rootAutorelease2(id obj)
1043 if (OBJC_IS_TAGGED_PTR(obj)) return obj;
1044 return AutoreleasePoolPage::autorelease(obj);
1048 _objc_rootRetainCount(id obj)
1053 // XXX -- There is no way that anybody can use this API race free in a
1054 // threaded environment because the result is immediately stale by the
1055 // time the caller receives it.
1057 if (OBJC_IS_TAGGED_PTR(obj)) return (uintptr_t)obj;
1059 SideTable *table = SideTable::tableForPointer(obj);
1061 size_t refcnt_result = 1;
1063 OSSpinLockLock(&table->slock);
1064 RefcountMap::iterator it = table->refcnts.find(DISGUISE(obj));
1065 if (it != table->refcnts.end()) {
1066 refcnt_result = (it->second >> 1) + 1;
1068 OSSpinLockUnlock(&table->slock);
1069 return refcnt_result;
1073 _objc_rootInit(id obj)
1075 // In practice, it will be hard to rely on this function.
1076 // Many classes do not properly chain -init calls.
1081 _objc_rootAllocWithZone(Class cls, malloc_zone_t *zone)
1086 // allocWithZone under __OBJC2__ ignores the zone parameter
1088 obj = class_createInstance(cls, 0);
1090 if (!zone || UseGC) {
1091 obj = class_createInstance(cls, 0);
1094 obj = class_createInstanceFromZone(cls, 0, zone);
1098 if (!obj) obj = callBadAllocHandler(cls);
1103 _objc_rootAlloc(Class cls)
1106 // Skip over the +allocWithZone: call if the class doesn't override it.
1107 // fixme not - this breaks ObjectAlloc
1108 if (! ((class_t *)cls)->isa->hasCustomAWZ()) {
1109 return class_createInstance(cls, 0);
1112 return [cls allocWithZone: nil];
1116 _objc_rootDealloc(id obj)
1121 if (OBJC_IS_TAGGED_PTR(obj)) return;
1123 object_dispose(obj);
1127 _objc_rootFinalize(id obj __unused)
1135 _objc_fatal("_objc_rootFinalize called with garbage collection off");
1139 _objc_rootZone(id obj)
1146 // allocWithZone under __OBJC2__ ignores the zone parameter
1147 return malloc_default_zone();
1149 malloc_zone_t *rval = malloc_zone_from_ptr(obj);
1150 return rval ? rval : malloc_default_zone();
1155 _objc_rootHash(id obj)
1158 return _object_getExternalHash(obj);
1160 return (uintptr_t)obj;
1163 // make CF link for now
// Compatibility shim so CoreFoundation can link against the underscored
// name; simply forwards to the public entry point.
void *
_objc_autoreleasePoolPush(void)
{
    return objc_autoreleasePoolPush();
}
// Compatibility shim so CoreFoundation can link against the underscored
// name; simply forwards to the public entry point.
void
_objc_autoreleasePoolPop(void *ctxt)
{
    objc_autoreleasePoolPop(ctxt);
}
1168 objc_autoreleasePoolPush(void)
1170 if (UseGC) return NULL;
1171 return AutoreleasePoolPage::push();
1175 objc_autoreleasePoolPop(void *ctxt)
1179 // fixme rdar://9167170
1182 AutoreleasePoolPage::pop(ctxt);
1186 _objc_autoreleasePoolPrint(void)
1189 AutoreleasePoolPage::printAll();
1192 #if SUPPORT_RETURN_AUTORELEASE
1195 Fast handling of returned autoreleased values.
1196 The caller and callee cooperate to keep the returned object
1197 out of the autorelease pool.
1201 objc_retainAutoreleasedReturnValue(ret);
1207 return objc_autoreleaseReturnValue(ret);
1209 objc_autoreleaseReturnValue() examines the caller's instructions following
1210 the return. If the caller's instructions immediately call
1211 objc_autoreleaseReturnValue, then the callee omits the -autorelease and saves
1212 the result in thread-local storage. If the caller does not look like it
1213 cooperates, then the callee calls -autorelease as usual.
1215 objc_autoreleaseReturnValue checks if the returned value is the same as the
1216 one in thread-local storage. If it is, the value is used directly. If not,
1217 the value is assumed to be truly autoreleased and is retained again. In
1218 either case, the caller now has a retained reference to the value.
1220 Tagged pointer objects do participate in the fast autorelease scheme,
1221 because it saves message sends. They are not entered in the autorelease
1222 pool in the slow case.
// x86_64: decide whether the caller cooperates with the fast return-
// autorelease handoff by inspecting the machine code at the return address.
// The caller qualifies if the instructions immediately after the call site
// are `movq %rax,%rdi` followed by a call whose ultimate target (through
// the PLT-style `jmpq *sym(%rip)` stub) is objc_retainAutoreleasedReturnValue.
// NOTE(review): the `return false;` bodies of the early-out ifs, the
// #if/#else structure around PREFER_GOTPCREL, and the final return are
// missing from this listing.
1227 static bool callerAcceptsFastAutorelease(const void * const ra0)
// Views of the return address as byte / halfword / word streams.
1229 const uint8_t *ra1 = (const uint8_t *)ra0;
1230 const uint16_t *ra2;
1231 const uint32_t *ra4 = (const uint32_t *)ra1;
1234 #define PREFER_GOTPCREL 0
1236 // 48 89 c7 movq %rax,%rdi
1237 // ff 15 callq *symbol@GOTPCREL(%rip)
// First 4 bytes must be movq %rax,%rdi followed by the 0xff opcode.
1238 if (*ra4 != 0xffc78948) {
1241 if (ra1[4] != 0x15) {
1246 // 48 89 c7 movq %rax,%rdi
// Non-GOTPCREL form: movq %rax,%rdi then a direct e8 call.
1248 if (*ra4 != 0xe8c78948) {
// Follow the call's rel32 displacement to the stub (4-byte prefix + 4-byte
// displacement = 8 bytes consumed).
1251 ra1 += (long)*(const int32_t *)(ra1 + 4) + 8l;
1252 ra2 = (const uint16_t *)ra1;
1253 // ff 25 jmpq *symbol@DYLDMAGIC(%rip)
// The stub must be an indirect jmp through a pointer slot.
1254 if (*ra2 != 0x25ff) {
// Resolve the RIP-relative slot (2-byte opcode + 4-byte displacement).
1258 ra1 += 6l + (long)*(const int32_t *)(ra1 + 2);
1259 sym = (const void **)ra1;
// Only the exact cooperating entry point qualifies.
1260 if (*sym != objc_retainAutoreleasedReturnValue)
1271 static bool callerAcceptsFastAutorelease(const void *ra)
1273 // if the low bit is set, we're returning to thumb mode
1274 if ((uintptr_t)ra & 1) {
1276 // we mask off the low bit via subtraction
1277 if (*(uint16_t *)((uint8_t *)ra - 1) == 0x463f) {
1281 // 07 70 a0 e1 mov r7, r7
1282 if (*(uint32_t *)ra == 0xe1a07007) {
1290 # elif __i386__ && TARGET_IPHONE_SIMULATOR
1292 static bool callerAcceptsFastAutorelease(const void *ra)
1297 // __i386__ && TARGET_IPHONE_SIMULATOR
1300 #warning unknown architecture
1302 static bool callerAcceptsFastAutorelease(const void *ra)
1309 // SUPPORT_RETURN_AUTORELEASE
// Callee half of the return-autorelease optimization. If the caller's code
// immediately calls objc_retainAutoreleasedReturnValue, stash obj in
// thread-local storage instead of autoreleasing; otherwise fall back to a
// real autorelease.
// NOTE(review): the `return obj;` inside the fast branch and the #endif are
// missing from this listing.
1314 objc_autoreleaseReturnValue(id obj)
1316 #if SUPPORT_RETURN_AUTORELEASE
// The reclaim slot must be empty on entry; a leftover value would indicate
// an unbalanced handoff.
1317 assert(tls_get_direct(AUTORELEASE_POOL_RECLAIM_KEY) == NULL);
// Peek at the caller's instructions at the return address to see if it
// cooperates with the fast path.
1319 if (callerAcceptsFastAutorelease(__builtin_return_address(0))) {
1320 tls_set_direct(AUTORELEASE_POOL_RECLAIM_KEY, obj);
// Slow path: ordinary autorelease.
1325 return objc_autorelease(obj);
id
objc_retainAutoreleaseReturnValue(id obj)
{
    // Retain first, then return through the fast return-autorelease path
    // (which may stash the value in TLS instead of autoreleasing it).
    id retained = objc_retain(obj);
    return objc_autoreleaseReturnValue(retained);
}
// Caller half of the return-autorelease optimization. If the callee stashed
// this exact object in the thread-local reclaim slot, claim it (clearing the
// slot) and keep its +1 reference; otherwise the value really was
// autoreleased, so retain it normally.
// NOTE(review): the `return obj;` inside the fast branch and the #endif are
// missing from this listing.
1335 objc_retainAutoreleasedReturnValue(id obj)
1337 #if SUPPORT_RETURN_AUTORELEASE
1338 if (obj == tls_get_direct(AUTORELEASE_POOL_RECLAIM_KEY)) {
// Clear the slot so the handoff cannot be claimed twice.
1339 tls_set_direct(AUTORELEASE_POOL_RECLAIM_KEY, 0);
// Slow path: the value was genuinely autoreleased.
1343 return objc_retain(obj);
1347 objc_storeStrong(id *location, id obj)
1349 // XXX FIXME -- GC support?
1350 id prev = *location;
id
objc_retainAutorelease(id obj)
{
    // Equivalent to [[obj retain] autorelease].
    id retained = objc_retain(obj);
    return objc_autorelease(retained);
}
1366 _objc_deallocOnMainThreadHelper(void *context)
1368 id obj = (id)context;
1372 #undef objc_retainedObject
1373 #undef objc_unretainedObject
1374 #undef objc_unretainedPointer
1376 // convert objc_objectptr_t to id, callee must take ownership.
// Convert objc_objectptr_t to id; the callee takes ownership of the +1
// reference the pointer carries. Purely a type-level cast at runtime.
id
objc_retainedObject(objc_objectptr_t pointer)
{
    return (id)pointer;
}
1379 // convert objc_objectptr_t to id, without ownership transfer.
// Convert objc_objectptr_t to id with no ownership transfer. Purely a
// type-level cast at runtime.
id
objc_unretainedObject(objc_objectptr_t pointer)
{
    return (id)pointer;
}
1382 // convert id to objc_objectptr_t, no ownership transfer.
// Convert id to objc_objectptr_t with no ownership transfer; the implicit
// conversion carries no retain/release side effects.
objc_objectptr_t
objc_unretainedPointer(id object)
{
    return object;
}
1388 AutoreleasePoolPage::init();
1392 @implementation NSObject
1395 if (UseGC) gc_init2();
1398 + (void)initialize {
1414 return object_getClass(self);
1417 + (Class)superclass {
1418 return class_getSuperclass(self);
1421 - (Class)superclass {
1422 return class_getSuperclass([self class]);
1425 + (BOOL)isMemberOfClass:(Class)cls {
1426 return object_getClass((id)self) == cls;
1429 - (BOOL)isMemberOfClass:(Class)cls {
1430 return [self class] == cls;
1433 + (BOOL)isKindOfClass:(Class)cls {
1434 for (Class tcls = object_getClass((id)self); tcls; tcls = class_getSuperclass(tcls)) {
1435 if (tcls == cls) return YES;
1440 - (BOOL)isKindOfClass:(Class)cls {
1441 for (Class tcls = [self class]; tcls; tcls = class_getSuperclass(tcls)) {
1442 if (tcls == cls) return YES;
1447 + (BOOL)isSubclassOfClass:(Class)cls {
1448 for (Class tcls = self; tcls; tcls = class_getSuperclass(tcls)) {
1449 if (tcls == cls) return YES;
1454 + (BOOL)isAncestorOfObject:(NSObject *)obj {
1455 for (Class tcls = [obj class]; tcls; tcls = class_getSuperclass(tcls)) {
1456 if (tcls == self) return YES;
1461 + (BOOL)instancesRespondToSelector:(SEL)sel {
1462 if (!sel) return NO;
1463 return class_respondsToSelector(self, sel);
1466 + (BOOL)respondsToSelector:(SEL)sel {
1467 if (!sel) return NO;
1468 return class_respondsToSelector(object_getClass((id)self), sel);
1471 - (BOOL)respondsToSelector:(SEL)sel {
1472 if (!sel) return NO;
1473 return class_respondsToSelector([self class], sel);
1476 + (BOOL)conformsToProtocol:(Protocol *)protocol {
1477 if (!protocol) return NO;
1478 for (Class tcls = self; tcls; tcls = class_getSuperclass(tcls)) {
1479 if (class_conformsToProtocol(tcls, protocol)) return YES;
1484 - (BOOL)conformsToProtocol:(Protocol *)protocol {
1485 if (!protocol) return NO;
1486 for (Class tcls = [self class]; tcls; tcls = class_getSuperclass(tcls)) {
1487 if (class_conformsToProtocol(tcls, protocol)) return YES;
1492 + (NSUInteger)hash {
1493 return _objc_rootHash(self);
1496 - (NSUInteger)hash {
1497 return _objc_rootHash(self);
1500 + (BOOL)isEqual:(id)obj {
1501 return obj == (id)self;
1504 - (BOOL)isEqual:(id)obj {
1534 + (IMP)instanceMethodForSelector:(SEL)sel {
1535 if (!sel) [self doesNotRecognizeSelector:sel];
1536 return class_getMethodImplementation(self, sel);
1539 + (IMP)methodForSelector:(SEL)sel {
1540 if (!sel) [self doesNotRecognizeSelector:sel];
1541 return class_getMethodImplementation(object_getClass((id)self), sel);
1544 - (IMP)methodForSelector:(SEL)sel {
1545 if (!sel) [self doesNotRecognizeSelector:sel];
1546 return class_getMethodImplementation([self class], sel);
1549 + (BOOL)resolveClassMethod:(SEL)sel {
1553 + (BOOL)resolveInstanceMethod:(SEL)sel {
1557 // Replaced by CF (throws an NSException)
1558 + (void)doesNotRecognizeSelector:(SEL)sel {
1559 _objc_fatal("+[%s %s]: unrecognized selector sent to instance %p",
1560 class_getName(self), sel_getName(sel), self);
1563 // Replaced by CF (throws an NSException)
1564 - (void)doesNotRecognizeSelector:(SEL)sel {
1565 _objc_fatal("-[%s %s]: unrecognized selector sent to instance %p",
1566 object_getClassName(self), sel_getName(sel), self);
1570 + (id)performSelector:(SEL)sel {
1571 if (!sel) [self doesNotRecognizeSelector:sel];
1572 return ((id(*)(id, SEL))objc_msgSend)((id)self, sel);
1575 + (id)performSelector:(SEL)sel withObject:(id)obj {
1576 if (!sel) [self doesNotRecognizeSelector:sel];
1577 return ((id(*)(id, SEL, id))objc_msgSend)((id)self, sel, obj);
1580 + (id)performSelector:(SEL)sel withObject:(id)obj1 withObject:(id)obj2 {
1581 if (!sel) [self doesNotRecognizeSelector:sel];
1582 return ((id(*)(id, SEL, id, id))objc_msgSend)((id)self, sel, obj1, obj2);
1585 - (id)performSelector:(SEL)sel {
1586 if (!sel) [self doesNotRecognizeSelector:sel];
1587 return ((id(*)(id, SEL))objc_msgSend)(self, sel);
1590 - (id)performSelector:(SEL)sel withObject:(id)obj {
1591 if (!sel) [self doesNotRecognizeSelector:sel];
1592 return ((id(*)(id, SEL, id))objc_msgSend)(self, sel, obj);
1595 - (id)performSelector:(SEL)sel withObject:(id)obj1 withObject:(id)obj2 {
1596 if (!sel) [self doesNotRecognizeSelector:sel];
1597 return ((id(*)(id, SEL, id, id))objc_msgSend)(self, sel, obj1, obj2);
1601 // Replaced by CF (returns an NSMethodSignature)
1602 + (NSMethodSignature *)instanceMethodSignatureForSelector:(SEL)sel {
1603 _objc_fatal("+[NSObject instanceMethodSignatureForSelector:] "
1604 "not available without CoreFoundation");
1607 // Replaced by CF (returns an NSMethodSignature)
1608 + (NSMethodSignature *)methodSignatureForSelector:(SEL)sel {
1609 _objc_fatal("+[NSObject methodSignatureForSelector:] "
1610 "not available without CoreFoundation");
1613 // Replaced by CF (returns an NSMethodSignature)
1614 - (NSMethodSignature *)methodSignatureForSelector:(SEL)sel {
1615 _objc_fatal("-[NSObject methodSignatureForSelector:] "
1616 "not available without CoreFoundation");
1619 + (void)forwardInvocation:(NSInvocation *)invocation {
1620 [self doesNotRecognizeSelector:(invocation ? [invocation selector] : 0)];
1623 - (void)forwardInvocation:(NSInvocation *)invocation {
1624 [self doesNotRecognizeSelector:(invocation ? [invocation selector] : 0)];
1627 + (id)forwardingTargetForSelector:(SEL)sel {
1631 - (id)forwardingTargetForSelector:(SEL)sel {
1636 // Replaced by CF (returns an NSString)
1637 + (NSString *)description {
1641 // Replaced by CF (returns an NSString)
1642 - (NSString *)description {
1646 + (NSString *)debugDescription {
1647 return [self description];
1650 - (NSString *)debugDescription {
1651 return [self description];
1656 return [[self alloc] init];
1663 // Replaced by ObjectAlloc
1665 __attribute__((aligned(16)))
1667 if (OBJC_IS_TAGGED_PTR(self)) return self;
1669 SideTable *table = SideTable::tableForPointer(self);
1671 if (OSSpinLockTry(&table->slock)) {
1672 table->refcnts[DISGUISE(self)] += 2;
1673 OSSpinLockUnlock(&table->slock);
1676 return _objc_rootRetain_slow(self);
1680 + (BOOL)_tryRetain {
1684 // Replaced by ObjectAlloc
1685 - (BOOL)_tryRetain {
1686 return _objc_rootTryRetain(self);
1689 + (BOOL)_isDeallocating {
1693 - (BOOL)_isDeallocating {
1694 return _objc_rootIsDeallocating(self);
1697 + (BOOL)allowsWeakReference {
1701 + (BOOL)retainWeakReference {
1705 - (BOOL)allowsWeakReference {
1706 return ! [self _isDeallocating];
1709 - (BOOL)retainWeakReference {
1710 return [self _tryRetain];
1713 + (oneway void)release {
1716 // Replaced by ObjectAlloc
1717 - (oneway void)release
1718 __attribute__((aligned(16)))
1720 // tagged pointer check is inside _objc_rootReleaseWasZero().
1722 if (_objc_rootReleaseWasZero(self) == false) {
1732 // Replaced by ObjectAlloc
1734 __attribute__((aligned(16)))
1736 // no tag check here: tagged pointers DO use fast autoreleasing
1738 #if SUPPORT_RETURN_AUTORELEASE
1739 assert(tls_get_direct(AUTORELEASE_POOL_RECLAIM_KEY) == NULL);
1741 if (callerAcceptsFastAutorelease(__builtin_return_address(0))) {
1742 tls_set_direct(AUTORELEASE_POOL_RECLAIM_KEY, self);
1746 return _objc_rootAutorelease2(self);
1749 + (NSUInteger)retainCount {
1753 - (NSUInteger)retainCount {
1754 return _objc_rootRetainCount(self);
1758 return _objc_rootAlloc(self);
1761 // Replaced by ObjectAlloc
1762 + (id)allocWithZone:(NSZone *)zone {
1763 return _objc_rootAllocWithZone(self, (malloc_zone_t *)zone);
1766 // Replaced by CF (throws an NSException)
1772 return _objc_rootInit(self);
1775 // Replaced by CF (throws an NSException)
1779 // Replaced by NSZombies
1781 _objc_rootDealloc(self);
1784 // Replaced by CF (throws an NSException)
1789 _objc_rootFinalize(self);
1793 return (NSZone *)_objc_rootZone(self);
1797 return (NSZone *)_objc_rootZone(self);
1804 + (id)copyWithZone:(NSZone *)zone {
1809 return [(id)self copyWithZone:NULL];
1816 + (id)mutableCopyWithZone:(NSZone *)zone {
1821 return [(id)self mutableCopyWithZone:NULL];
1826 __attribute__((aligned(16)))
1830 if (!obj || OBJC_IS_TAGGED_PTR(obj)) {
1834 if (((class_t *)obj->isa)->hasCustomRR()) {
1835 return [obj retain];
1837 return bypass_msgSend_retain(obj);
1839 return [obj retain];
1842 // clang really wants to reorder the "mov %rdi, %rax" early
1843 // force better code gen with a data barrier
1848 __attribute__((aligned(16)))
1850 objc_release(id obj)
1852 if (!obj || OBJC_IS_TAGGED_PTR(obj)) {
1856 if (((class_t *)obj->isa)->hasCustomRR()) {
1857 return (void)[obj release];
1859 return bypass_msgSend_release(obj);
1865 __attribute__((aligned(16)))
1867 objc_autorelease(id obj)
1869 if (!obj || OBJC_IS_TAGGED_PTR(obj)) {
1873 if (((class_t *)obj->isa)->hasCustomRR()) {
1874 return [obj autorelease];
1876 return bypass_msgSend_autorelease(obj);
1878 return [obj autorelease];
1881 // clang really wants to reorder the "mov %rdi, %rax" early
1882 // force better code gen with a data barrier
1888 _objc_rootRetain(id obj)
1893 if (OBJC_IS_TAGGED_PTR(obj)) return obj;
1895 return bypass_msgSend_retain(obj);
1899 _objc_rootRelease(id obj)
1904 if (OBJC_IS_TAGGED_PTR(obj)) return;
1906 bypass_msgSend_release(obj);
1910 _objc_rootAutorelease(id obj)
1912 assert(obj); // root classes shouldn't get here, since objc_msgSend ignores nil
1919 // no tag check here: tagged pointers DO use fast autoreleasing
1921 return bypass_msgSend_autorelease(obj);