2 * Copyright (c) 2004-2007 Apple Inc. All rights reserved.
4 * @APPLE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
21 * @APPLE_LICENSE_HEADER_END@
24 #include "objc-private.h"
26 #include "objc-config.h"
27 #include "objc-auto.h"
28 #include "objc-accessors.h"
36 #include <mach/mach.h>
37 #include <mach-o/dyld.h>
38 #include <mach-o/nlist.h>
39 #include <sys/types.h>
41 #include <libkern/OSAtomic.h>
42 #include <auto_zone.h>
44 #include <Block_private.h>
45 #include <dispatch/private.h>
47 #include "objc-private.h"
48 #include "objc-references.h"
53 #if !defined(NDEBUG) && !__OBJC2__
54 #include "objc-exception.h"
58 static auto_zone_t *gc_zone_init(void);
59 static void gc_block_init(void);
60 static void registeredClassTableInit(void);
61 static BOOL objc_isRegisteredClass(Class candidate);
64 static BOOL WantsMainThreadFinalization = NO;
66 auto_zone_t *gc_zone = nil;
69 /* Method prototypes */
70 @interface DoesNotExist
71 - (const char *)UTF8String;
76 /***********************************************************************
77 * Break-on-error functions
78 **********************************************************************/
81 void objc_assign_ivar_error(id base, ptrdiff_t offset)
85 void objc_assign_global_error(id value, id *slot)
89 void objc_exception_during_finalize_error(void)
92 /***********************************************************************
94 * Called by various libraries.
95 **********************************************************************/
// Tuning knobs for the libauto collector, called by various libraries.
// Old-style (underscore) and new-style (camelCase) spellings write the
// same libauto parameter and are kept for binary compatibility.
// NOTE(review): lines are elided from this extraction; the originals
// presumably wrap each store in an `if (UseGC)` guard — confirm against
// the unabridged file.
97 OBJC_EXPORT void objc_set_collection_threshold(size_t threshold) { // Old naming
// Bytes allocated between collections before a collection is triggered.
99 auto_collection_parameters(gc_zone)->collection_threshold = threshold;
103 OBJC_EXPORT void objc_setCollectionThreshold(size_t threshold) {
105 auto_collection_parameters(gc_zone)->collection_threshold = threshold;
// Ratio of generational (partial) collections per full collection.
109 void objc_setCollectionRatio(size_t ratio) {
111 auto_collection_parameters(gc_zone)->full_vs_gen_frequency = ratio;
115 void objc_set_collection_ratio(size_t ratio) { // old naming
117 auto_collection_parameters(gc_zone)->full_vs_gen_frequency = ratio;
// Mark a class so its instances are finalized only on the main thread,
// and record globally that main-thread finalization work can now exist.
121 void objc_finalizeOnMainThread(Class cls) {
123 WantsMainThreadFinalization = YES;
124 cls->setShouldFinalizeOnMainThread();
128 // stack based data structure queued if/when there is main-thread-only finalization work TBD
// One unit of main-thread finalization work, linked into MainThreadWorkQ.
// Instances live on the requesting thread's stack (see batchFinalizeOnTwoThreads),
// so the queue must be fully drained before the owner's frame unwinds.
129 typedef struct BatchFinalizeBlock {
// Collector-supplied enumerator and its opaque cursor over the garbage list.
130 auto_zone_foreach_object_t foreach;
131 auto_zone_cursor_t cursor;
// NOTE(review): a field is elided here in this extraction — later code
// references bfb->cursor_size, presumably declared on the missing line.
// `volatile` because these flags are polled/updated across threads under
// the queue mutex/condition.
133 volatile BOOL finished;
134 volatile BOOL started;
135 struct BatchFinalizeBlock *next;
136 } BatchFinalizeBlock_t;
// Head of the main-thread finalization work queue; guarded by `mutex`,
// with `condition` used to signal completion/arrival of work.
// NOTE(review): the struct declaration line for this queue (presumably
// `static struct { ... } MainThreadWorkQ;`) is elided in this extraction.
140 pthread_mutex_t mutex;
141 pthread_cond_t condition;
142 BatchFinalizeBlock_t *head;
143 BatchFinalizeBlock_t *tail;
147 void objc_startCollectorThread(void) {
150 void objc_start_collector_thread(void) {
153 static void batchFinalizeOnMainThread(void);
// Public collection-request entry point.
// `options` selects the collection mode (low 2 bits) plus flags such as
// OBJC_COLLECT_IF_NEEDED and OBJC_WAIT_UNTIL_DONE.
// NOTE(review): several lines are elided in this extraction (likely a
// leading `if (!UseGC) return;`, a `done = YES;` inside the completion
// block, the `while (!done)` wait loop, and the `} else {` before the
// non-blocking path) — confirm against the unabridged file.
155 void objc_collect(unsigned long options) {
157 BOOL onMainThread = pthread_main_np() ? YES : NO;
159 // while we're here, sneak off and do some finalization work (if any)
160 if (onMainThread) batchFinalizeOnMainThread();
161 // now on with our normally scheduled programming
162 auto_zone_options_t amode = AUTO_ZONE_COLLECT_NO_OPTIONS;
// Map the OBJC_* mode bits onto libauto's AUTO_ZONE_COLLECT_* modes.
163 if (!(options & OBJC_COLLECT_IF_NEEDED)) {
164 switch (options & 0x3) {
165 case OBJC_RATIO_COLLECTION: amode = AUTO_ZONE_COLLECT_RATIO_COLLECTION; break;
166 case OBJC_GENERATIONAL_COLLECTION: amode = AUTO_ZONE_COLLECT_GENERATIONAL_COLLECTION; break;
167 case OBJC_FULL_COLLECTION: amode = AUTO_ZONE_COLLECT_FULL_COLLECTION; break;
168 case OBJC_EXHAUSTIVE_COLLECTION: amode = AUTO_ZONE_COLLECT_EXHAUSTIVE_COLLECTION; break;
170 amode |= AUTO_ZONE_COLLECT_COALESCE;
171 amode |= AUTO_ZONE_COLLECT_LOCAL_COLLECTION;
// Synchronous request: block on a condition until the collector's
// completion callback fires.
173 if (options & OBJC_WAIT_UNTIL_DONE) {
174 __block BOOL done = NO;
175 // If executing on the main thread, use the main thread work queue condition to block,
176 // so main thread finalization can complete. Otherwise, use a thread-local condition.
177 pthread_mutex_t localMutex = PTHREAD_MUTEX_INITIALIZER, *mutex = &localMutex;
178 pthread_cond_t localCondition = PTHREAD_COND_INITIALIZER, *condition = &localCondition;
// On the main thread, share the work-queue's mutex/condition so wakeups
// for queued main-thread finalization are also observed here.
180 mutex = &MainThreadWorkQ.mutex;
181 condition = &MainThreadWorkQ.condition;
183 pthread_mutex_lock(mutex);
// Kick off the collection on a global queue; the block signals us when done.
184 auto_zone_collect_and_notify(gc_zone, amode, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
185 pthread_mutex_lock(mutex);
187 pthread_cond_signal(condition);
188 pthread_mutex_unlock(mutex);
191 pthread_cond_wait(condition, mutex);
// Woken possibly because main-thread finalization work arrived: drop the
// lock, drain it, and re-check (pthread_cond_wait may wake spuriously).
192 if (onMainThread && MainThreadWorkQ.head) {
193 pthread_mutex_unlock(mutex);
194 batchFinalizeOnMainThread();
195 pthread_mutex_lock(mutex);
198 pthread_mutex_unlock(mutex);
// Asynchronous request: fire-and-forget.
200 auto_zone_collect(gc_zone, amode);
205 // USED BY CF & ONE OTHER
// Returns YES iff GC is enabled and `object` is a pointer into the GC zone.
206 BOOL objc_isAuto(id object)
208 return UseGC && auto_zone_is_valid_pointer(gc_zone, object) != 0;
// Is garbage collection active in this process?
// NOTE(review): bodies (presumably `return UseGC;`) and the
// objc_collectableZone body are elided in this extraction.
212 BOOL objc_collectingEnabled(void)
217 BOOL objc_collecting_enabled(void) // Old naming
222 malloc_zone_t *objc_collectableZone(void) {
// Dump the GC heap to a file for debugging; returns NO on failure.
// The generated filename is copied back to `filenamebuffer` (of size
// `length`) when it fits.
226 BOOL objc_dumpHeap(char *filenamebuffer, unsigned long length) {
// Monotonic counter makes each dump's filename unique within the process.
227 static int counter = 0;
// NOTE(review): the increment of `counter` and the local `buffer`
// declaration are on lines elided from this extraction.
230 sprintf(buffer, OBJC_HEAP_DUMP_FILENAME_FORMAT, getpid(), counter);
231 if (!_objc_dumpHeap(gc_zone, buffer)) return NO;
232 if (filenamebuffer) {
233 unsigned long blen = strlen(buffer);
// Copy the name back only if it fits (the length check is on an elided
// line); otherwise return an empty string so the caller gets *some* answer.
235 strncpy(filenamebuffer, buffer, blen+1);
237 filenamebuffer[0] = 0; // give some answer
243 /***********************************************************************
245 * Called by CF and Foundation.
246 **********************************************************************/
248 // Allocate an object in the GC zone, with the given number of extra bytes.
// Thin compatibility wrapper for CF/Foundation; defers to the normal
// instance-creation path, which is GC-aware when GC is on.
249 id objc_allocate_object(Class cls, int extra)
251 return class_createInstance(cls, extra);
255 /***********************************************************************
256 * Write barrier implementations, optimized for when GC is known to be on
257 * Called by the write barrier exports only.
258 * These implementations assume GC is on. The exported function must
259 * either perform the check itself or be conditionally stomped at
261 **********************************************************************/
// GC variants of the write barriers. These assume GC is ON; the exported
// symbols are bound to either the _gc or _non_gc variant by the dyld
// resolvers below. Each _gc variant records the store with libauto so the
// collector can see the new reference.
// NOTE(review): trailing `return value;` lines and closing braces are
// elided from this extraction throughout this section.
263 id objc_assign_strongCast_gc(id value, id *slot) {
264 if (!auto_zone_set_write_barrier(gc_zone, (void*)slot, value)) { // stores & returns true if slot points into GC allocated memory
265 auto_zone_root_write_barrier(gc_zone, slot, value); // always stores
// Store into global/static memory: register the slot as an explicit root.
270 id objc_assign_global_gc(id value, id *slot) {
271 // use explicit root registration.
272 if (value && auto_zone_is_valid_pointer(gc_zone, value)) {
// Storing an already-finalized object is a resurrection bug; report it.
273 if (auto_zone_is_finalized(gc_zone, value)) {
274 _objc_inform("GC: storing an already collected object %p into global memory at %p, break on objc_assign_global_error to debug\n", (void*)value, slot);
275 objc_assign_global_error(value, slot);
277 auto_zone_add_root(gc_zone, slot, value);
// Store into thread-local storage: also registered as a root.
285 id objc_assign_threadlocal_gc(id value, id *slot)
287 if (value && auto_zone_is_valid_pointer(gc_zone, value)) {
288 auto_zone_add_root(gc_zone, slot, value);
// Store into an instance variable at `base + offset`; the enclosing
// object must itself live in the GC zone.
297 id objc_assign_ivar_gc(id value, id base, ptrdiff_t offset)
299 id *slot = (id*) ((char *)base + offset);
302 if (!auto_zone_set_write_barrier(gc_zone, (char *)base + offset, value)) {
303 _objc_inform("GC: %p + %tu isn't in the auto_zone, break on objc_assign_ivar_error to debug.\n", (void*)base, offset);
304 objc_assign_ivar_error(base, offset);
// Non-GC variants: plain stores, no barrier bookkeeping.
313 id objc_assign_strongCast_non_gc(id value, id *slot) {
314 return (*slot = value);
317 id objc_assign_global_non_gc(id value, id *slot) {
318 return (*slot = value);
321 id objc_assign_threadlocal_non_gc(id value, id *slot) {
322 return (*slot = value);
325 id objc_assign_ivar_non_gc(id value, id base, ptrdiff_t offset) {
326 id *slot = (id*) ((char *)base + offset);
327 return (*slot = value);
331 /***********************************************************************
332 * Non-trivial write barriers
333 **********************************************************************/
// memmove that preserves write-barrier bookkeeping when the destination
// is in the GC heap; falls back to plain memmove otherwise.
// NOTE(review): the `if (UseGC)` / `else` framing lines appear to be
// elided from this extraction — confirm against the unabridged file.
335 void *objc_memmove_collectable(void *dst, const void *src, size_t size)
338 return auto_zone_write_barrier_memmove(gc_zone, dst, src, size);
340 return memmove(dst, src, size);
// Atomic compare-and-swap family. Each pair of calls below is the GC path
// (libauto CAS, which also records the store) followed by the non-GC path
// (OSAtomic CAS). The *Barrier variants additionally issue a memory
// barrier; the Global/InstanceVariable variants tell libauto whether the
// slot is a root. NOTE(review): the `if (UseGC)` / `else` lines between
// each pair are elided in this extraction.
344 BOOL objc_atomicCompareAndSwapPtr(id predicate, id replacement, volatile id *objectLocation) {
345 const BOOL issueMemoryBarrier = NO;
347 return auto_zone_atomicCompareAndSwapPtr(gc_zone, (void *)predicate, (void *)replacement, (void * volatile *)objectLocation, issueMemoryBarrier);
349 return OSAtomicCompareAndSwapPtr((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
352 BOOL objc_atomicCompareAndSwapPtrBarrier(id predicate, id replacement, volatile id *objectLocation) {
353 const BOOL issueMemoryBarrier = YES;
355 return auto_zone_atomicCompareAndSwapPtr(gc_zone, (void *)predicate, (void *)replacement, (void * volatile *)objectLocation, issueMemoryBarrier);
357 return OSAtomicCompareAndSwapPtrBarrier((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
360 BOOL objc_atomicCompareAndSwapGlobal(id predicate, id replacement, volatile id *objectLocation) {
361 const BOOL isGlobal = YES;
362 const BOOL issueMemoryBarrier = NO;
364 return auto_zone_atomicCompareAndSwap(gc_zone, (void *)predicate, (void *)replacement, (void * volatile *)objectLocation, isGlobal, issueMemoryBarrier);
366 return OSAtomicCompareAndSwapPtr((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
369 BOOL objc_atomicCompareAndSwapGlobalBarrier(id predicate, id replacement, volatile id *objectLocation) {
370 const BOOL isGlobal = YES;
371 const BOOL issueMemoryBarrier = YES;
373 return auto_zone_atomicCompareAndSwap(gc_zone, (void *)predicate, (void *)replacement, (void * volatile *)objectLocation, isGlobal, issueMemoryBarrier);
375 return OSAtomicCompareAndSwapPtrBarrier((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
378 BOOL objc_atomicCompareAndSwapInstanceVariable(id predicate, id replacement, volatile id *objectLocation) {
379 const BOOL isGlobal = NO;
380 const BOOL issueMemoryBarrier = NO;
382 return auto_zone_atomicCompareAndSwap(gc_zone, (void *)predicate, (void *)replacement, (void * volatile *)objectLocation, isGlobal, issueMemoryBarrier);
384 return OSAtomicCompareAndSwapPtr((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
387 BOOL objc_atomicCompareAndSwapInstanceVariableBarrier(id predicate, id replacement, volatile id *objectLocation) {
388 const BOOL isGlobal = NO;
389 const BOOL issueMemoryBarrier = YES;
391 return auto_zone_atomicCompareAndSwap(gc_zone, (void *)predicate, (void *)replacement, (void * volatile *)objectLocation, isGlobal, issueMemoryBarrier);
393 return OSAtomicCompareAndSwapPtrBarrier((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
397 /***********************************************************************
399 **********************************************************************/
// GC weak-reference read: a non-nil cached value is re-read through
// libauto so a pointer to an object already being collected reads as nil.
401 id objc_read_weak_gc(id *location) {
402 id result = *location;
404 result = (id)auto_read_weak_reference(gc_zone, (void **)location);
// Non-GC read: plain load (body elided in this extraction).
409 id objc_read_weak_non_gc(id *location) {
// GC weak store: registers `location` as a weak reference with libauto.
413 id objc_assign_weak_gc(id value, id *location) {
414 auto_assign_weak_reference(gc_zone, value, (const void **)location, nil);
// Non-GC weak store: plain assignment.
418 id objc_assign_weak_non_gc(id value, id *location) {
419 return (*location = value);
// After an object is copied/moved, walk the class's weak ivar layout and
// re-register each weak slot at its new address. The layout is a stream
// of nibble-encoded bytes: high nibble = pointer-words to skip, low
// nibble = number of consecutive weak slots that follow.
423 void gc_fixup_weakreferences(id newObject, id oldObject) {
424 // fix up weak references if any.
425 const unsigned char *weakLayout = (const unsigned char *)class_getWeakIvarLayout(newObject->ISA());
// NOTE(review): the nil-layout early return, `unsigned char byte;`
// declaration, and the inner while-loop over `weaks` are on lines elided
// from this extraction.
427 void **newPtr = (void **)newObject, **oldPtr = (void **)oldObject;
429 while ((byte = *weakLayout++)) {
430 unsigned skips = (byte >> 4);
431 unsigned weaks = (byte & 0x0F);
432 newPtr += skips, oldPtr += skips;
// Re-point each weak reference from the old slot to the new slot.
435 auto_assign_weak_reference(gc_zone, auto_read_weak_reference(gc_zone, oldPtr), (const void **)newPtr, nil);
443 /***********************************************************************
444 * dyld resolver functions for basic GC write barriers
445 * dyld calls the resolver function to bind the symbol.
446 * We return the GC or non-GC variant as appropriate.
447 **********************************************************************/
// Defines a dyld symbol resolver for `name`: at bind time dyld calls
// name_resolver(), which returns the address of name_gc or name_non_gc
// depending on whether GC is enabled, so the chosen variant is bound
// directly with no per-call check. The .symbol_resolver directive marks
// the function as a resolver for the exported symbol `_name`.
449 #define GC_RESOLVER(name) \
450 OBJC_EXPORT void *name##_resolver(void) __asm__("_" #name); \
451 void *name##_resolver(void) \
453 __asm__(".symbol_resolver _" #name); \
454 if (UseGC) return (void*)name##_gc; \
455 else return (void*)name##_non_gc; \
// Instantiate resolvers for every GC-sensitive entry point.
458 GC_RESOLVER(objc_assign_ivar)
459 GC_RESOLVER(objc_assign_strongCast)
460 GC_RESOLVER(objc_assign_global)
461 GC_RESOLVER(objc_assign_threadlocal)
462 GC_RESOLVER(objc_read_weak)
463 GC_RESOLVER(objc_assign_weak)
464 GC_RESOLVER(objc_getProperty)
465 GC_RESOLVER(objc_setProperty)
466 GC_RESOLVER(objc_getAssociatedObject)
467 GC_RESOLVER(objc_setAssociatedObject)
468 GC_RESOLVER(_object_addExternalReference)
469 GC_RESOLVER(_object_readExternalReference)
470 GC_RESOLVER(_object_removeExternalReference)
473 /***********************************************************************
475 * Used to isolate resurrection of garbage objects during finalization.
476 **********************************************************************/
// Returns YES iff `ptr` is a GC object currently queued for / undergoing
// finalization; used to detect resurrection during finalize.
// NOTE(review): the `return NO;` fallthrough for the non-GC / nil case is
// on a line elided from this extraction.
477 BOOL objc_is_finalized(void *ptr) {
478 if (ptr != nil && UseGC) {
479 return auto_zone_is_finalized(gc_zone, ptr);
// Scrub the current thread's stack so stale values are not mistaken for
// live references by the conservative scanner. `options` is currently
// unused (0 is passed through).
489 void objc_clear_stack(unsigned long options) {
// NOTE(review): an `if (!UseGC) return;` guard is presumably on an elided
// line — confirm against the unabridged file.
491 auto_zone_clear_stack(gc_zone, 0);
495 /***********************************************************************
496 * Finalization support
497 **********************************************************************/
499 // Finalizer crash debugging
// Last object whose -finalize is in flight; recorded so a crash inside a
// finalizer shows up in crash logs (see CRSetCrashLogMessage2 below).
500 static void *finalizing_object;
502 // finalize a single object without fuss
503 // When there are no main-thread-only classes this is used directly
504 // Otherwise, it is used indirectly by smarter code that knows main-thread-affinity requirements
505 static void finalizeOneObject(void *obj, void *ignored) {
507 finalizing_object = obj;
// NOTE(review): the `id object = (id)obj;` declaration line is elided in
// this extraction.
509 Class cls = object->ISA();
510 CRSetCrashLogMessage2(class_getName(cls));
512 // Call the -finalize method.
513 ((void(*)(id, SEL))objc_msgSend)(object, @selector(finalize));
515 // Call C++ destructors.
516 // This would be objc_destructInstance() but for performance.
517 if (cls->hasCxxDtor()) {
518 object_cxxDestruct(object);
// Clear the crash-log breadcrumbs now that finalization completed.
521 finalizing_object = nil;
522 CRSetCrashLogMessage2(nil);
525 // finalize object only if it is a main-thread-only object.
526 // Called only from the main thread.
527 static void finalizeOneMainThreadOnlyObject(void *obj, void *arg) {
529 Class cls = object->ISA();
// Defensive check: garbage with a nil isa cannot be dispatched to.
531 _objc_fatal("object with nil ISA passed to finalizeOneMainThreadOnlyObject: %p\n", obj);
533 if (cls->shouldFinalizeOnMainThread()) {
534 finalizeOneObject(obj, nil);
538 // finalize one object only if it is not a main-thread-only object
539 // called from any other thread than the main thread
540 // Important: if a main-thread-only object is passed, return that fact in the needsMain argument
541 static void finalizeOneAnywhereObject(void *obj, void *needsMain) {
543 Class cls = object->ISA();
544 bool *needsMainThreadWork = (bool *)needsMain;
546 _objc_fatal("object with nil ISA passed to finalizeOneAnywhereObject: %p\n", obj);
548 if (!cls->shouldFinalizeOnMainThread()) {
549 finalizeOneObject(obj, nil);
// Main-thread-only object encountered off the main thread: skip it here
// and tell the caller the main thread must make a second pass.
552 *needsMainThreadWork = true;
557 // Utility workhorse.
558 // Set up the expensive @try block and ask the collector to hand the next object to
559 // our finalizeAnObject function.
560 // Track and return a boolean that records whether or not any main thread work is necessary.
561 // (When we know that there are no main thread only objects then the boolean isn't even computed)
// Drive the collector's garbage enumerator through `finalizeAnObject`
// under a @try block, restarting after any exception a finalizer throws.
// Returns whether any main-thread-only work was discovered (only
// meaningful when finalizeAnObject is finalizeOneAnywhereObject).
// NOTE(review): a `size_t cursor_size` parameter and the surrounding
// for(;;)/@try framing lines are elided in this extraction.
562 static bool batchFinalize(auto_zone_t *zone,
563 auto_zone_foreach_object_t foreach,
564 auto_zone_cursor_t cursor,
566 void (*finalizeAnObject)(void *, void*))
568 #if !defined(NDEBUG) && !__OBJC2__
569 // debug: don't call try/catch before exception handlers are installed
570 objc_exception_functions_t table = {};
571 objc_exception_get_functions(&table);
572 assert(table.throw_exc);
575 bool needsMainThreadWork = false;
// Hand each garbage object to the worker; the cursor remembers progress,
// so after an exception we simply loop and continue from where we were.
578 foreach(cursor, finalizeAnObject, &needsMainThreadWork);
579 // non-exceptional return means finalization is complete.
582 @catch (id exception) {
583 // whoops, note exception, then restart at cursor's position
584 _objc_inform("GC: -finalize resulted in an exception (%p) being thrown, break on objc_exception_during_finalize_error to debug\n\t%s", exception, (const char*)[[exception description] UTF8String]);
585 objc_exception_during_finalize_error();
// Second catch arm (presumably `@catch (...)` on an elided line): same
// recovery for non-id exceptions, which cannot be described.
588 // whoops, note exception, then restart at cursor's position
589 _objc_inform("GC: -finalize resulted in an exception being thrown, break on objc_exception_during_finalize_error to debug");
590 objc_exception_during_finalize_error();
593 return needsMainThreadWork;
596 // Called on main thread-only.
597 // Pick up work from global queue.
598 // called parasitically by anyone requesting a collection
599 // called explicitly when there is known to be main thread only finalization work
600 // In both cases we are on the main thread
601 // Guard against recursion by something called from a finalizer
596 // Called on main thread-only.
597 // Pick up work from global queue.
598 // called parasitically by anyone requesting a collection
599 // called explicitly when there is known to be main thread only finalization work
600 // In both cases we are on the main thread
601 // Guard against recursion by something called from a finalizer
602 static void batchFinalizeOnMainThread() {
603 pthread_mutex_lock(&MainThreadWorkQ.mutex);
// `started` on the head block doubles as the recursion guard: if a
// finalizer re-enters this function, the in-progress block is detected
// and we bail out immediately.
604 if (!MainThreadWorkQ.head || MainThreadWorkQ.head->started) {
605 // No work or we're already here
606 pthread_mutex_unlock(&MainThreadWorkQ.mutex);
// Drain the queue: finalize each block's main-thread-only objects with
// the queue unlocked, then signal the waiting collector thread.
// NOTE(review): the `bfb->started = YES;` and `bfb->finished = YES;`
// updates appear to be on lines elided from this extraction.
609 while (MainThreadWorkQ.head) {
610 BatchFinalizeBlock_t *bfb = MainThreadWorkQ.head;
612 pthread_mutex_unlock(&MainThreadWorkQ.mutex);
614 batchFinalize(gc_zone, bfb->foreach, bfb->cursor, bfb->cursor_size, finalizeOneMainThreadOnlyObject);
615 // signal the collector thread(s) that finalization has finished.
616 pthread_mutex_lock(&MainThreadWorkQ.mutex);
618 pthread_cond_broadcast(&MainThreadWorkQ.condition);
// Pop the completed block; clear the tail when the queue empties.
619 MainThreadWorkQ.head = bfb->next;
621 MainThreadWorkQ.tail = nil;
622 pthread_mutex_unlock(&MainThreadWorkQ.mutex);
626 // Knowing that we possibly have main thread only work to do, first process everything
627 // that is not main-thread-only. If we discover main thread only work, queue a work block
628 // to the main thread that will do just the main thread only work. Wait for it.
629 // Called from a non main thread.
626 // Knowing that we possibly have main thread only work to do, first process everything
627 // that is not main-thread-only. If we discover main thread only work, queue a work block
628 // to the main thread that will do just the main thread only work. Wait for it.
629 // Called from a non main thread.
// NOTE(review): the `size_t cursor_size` parameter line and the
// bfb.cursor/started/finished/next initializations are elided in this
// extraction — confirm against the unabridged file.
630 static void batchFinalizeOnTwoThreads(auto_zone_t *zone,
631 auto_zone_foreach_object_t foreach,
632 auto_zone_cursor_t cursor,
635 // First, lets get rid of everything we can on this thread, then ask main thread to help if needed
// Work on a copy of the cursor so the original can be replayed by the
// main thread over the same garbage list.
636 char cursor_copy[cursor_size];
637 memcpy(cursor_copy, cursor, cursor_size);
638 bool needsMainThreadFinalization = batchFinalize(zone, foreach, (auto_zone_cursor_t)cursor_copy, cursor_size, finalizeOneAnywhereObject);
640 if (! needsMainThreadFinalization)
641 return; // no help needed
643 // set up the control block. Either our ping of main thread with _callOnMainThread will get to it, or
644 // an objc_collect(if_needed) will get to it. Either way, this block will be processed on the main thread.
// The control block lives on this stack frame — safe because we block
// below until the main thread marks it finished.
645 BatchFinalizeBlock_t bfb;
646 bfb.foreach = foreach;
648 bfb.cursor_size = cursor_size;
// Append to the work queue (FIFO preserves finalization ordering).
652 pthread_mutex_lock(&MainThreadWorkQ.mutex);
653 if (MainThreadWorkQ.tail) {
655 // link to end so that ordering of finalization is preserved.
656 MainThreadWorkQ.tail->next = &bfb;
657 MainThreadWorkQ.tail = &bfb;
660 MainThreadWorkQ.head = &bfb;
661 MainThreadWorkQ.tail = &bfb;
663 pthread_mutex_unlock(&MainThreadWorkQ.mutex);
665 //printf("----->asking main thread to finalize\n");
666 dispatch_async(dispatch_get_main_queue(), ^{ batchFinalizeOnMainThread(); });
668 // wait for the main thread to finish finalizing instances of classes marked CLS_FINALIZE_ON_MAIN_THREAD.
669 pthread_mutex_lock(&MainThreadWorkQ.mutex);
670 while (!bfb.finished) {
671 // the main thread might be blocked waiting for a synchronous collection to complete, so wake it here
672 pthread_cond_signal(&MainThreadWorkQ.condition);
673 pthread_cond_wait(&MainThreadWorkQ.condition, &MainThreadWorkQ.mutex);
675 pthread_mutex_unlock(&MainThreadWorkQ.mutex);
676 //printf("<------ main thread finalize done\n");
682 // collector calls this with garbage ready
683 // thread collectors, too, so this needs to be thread-safe
682 // collector calls this with garbage ready
683 // thread collectors, too, so this needs to be thread-safe
// Installed as libauto's batch_invalidate callback in gc_zone_init().
// NOTE(review): the `size_t cursor_size` parameter line and the `else`
// between the two paths are elided in this extraction.
684 static void BatchInvalidate(auto_zone_t *zone,
685 auto_zone_foreach_object_t foreach,
686 auto_zone_cursor_t cursor,
// Fast path: everything can be finalized right here, either because we
// already are the main thread or no main-thread-only class exists yet.
689 if (pthread_main_np() || !WantsMainThreadFinalization) {
690 // Collect all objects. We're either pre-multithreaded on main thread or we're on the collector thread
691 // but no main-thread-only objects have been allocated.
692 batchFinalize(zone, foreach, cursor, cursor_size, finalizeOneObject);
695 // We're on the dedicated thread. Collect some on main thread, the rest here.
696 batchFinalizeOnTwoThreads(zone, foreach, cursor, cursor_size);
704 * Collector calls into this system when it finds resurrected objects.
705 * This keeps them pitifully alive and leaked, even if they reference garbage.
708 // idea: keep a side table mapping resurrected object pointers to their original Class, so we don't
709 // need to smash anything. alternatively, could use associative references to track against a secondary
710 // object with information about the resurrection, such as a stack crawl, etc.
// Resurrection support: a resurrected object is re-classed to the
// synthetic _NSResurrectedObject class; its original class is parked in a
// side map (guarded by _NSResurrectedObjectLock) for diagnostics.
712 static Class _NSResurrectedObjectClass;
713 static NXMapTable *_NSResurrectedObjectMap = nil;
714 static pthread_mutex_t _NSResurrectedObjectLock = PTHREAD_MUTEX_INITIALIZER;
// Look up the original class of a resurrected object (nil if unknown).
716 static Class resurrectedObjectOriginalClass(id object) {
718 pthread_mutex_lock(&_NSResurrectedObjectLock);
719 originalClass = (Class) NXMapGet(_NSResurrectedObjectMap, object);
720 pthread_mutex_unlock(&_NSResurrectedObjectLock);
721 return originalClass;
// Catch-all class method: all class messages just return self.
724 static id _NSResurrectedObject_classMethod(id self, SEL selector) { return self; }
// Catch-all instance method: logs the stray message to the resurrected
// object, naming its original class. (Return elided in this extraction.)
726 static id _NSResurrectedObject_instanceMethod(id self, SEL name) {
727 _objc_inform("**resurrected** object %p of class %s being sent message '%s'\n", (void*)self, class_getName(resurrectedObjectOriginalClass(self)), sel_getName(name));
// -finalize for resurrected objects: drop the side-map entry, log, then
// run the root finalizer.
731 static void _NSResurrectedObject_finalize(id self, SEL _cmd) {
733 pthread_mutex_lock(&_NSResurrectedObjectLock);
734 originalClass = (Class) NXMapRemove(_NSResurrectedObjectMap, self);
735 pthread_mutex_unlock(&_NSResurrectedObjectLock);
736 if (originalClass) _objc_inform("**resurrected** object %p of class %s being finalized\n", (void*)self, class_getName(originalClass));
737 _objc_rootFinalize(self);
// +resolveInstanceMethod:/+resolveClassMethod: lazily install the
// catch-all IMPs for any selector sent to a resurrected object.
740 static BOOL _NSResurrectedObject_resolveInstanceMethod(id self, SEL _cmd, SEL name) {
741 class_addMethod((Class)self, name, (IMP)_NSResurrectedObject_instanceMethod, "@@:");
745 static BOOL _NSResurrectedObject_resolveClassMethod(id self, SEL _cmd, SEL name) {
746 class_addMethod(self->ISA(), name, (IMP)_NSResurrectedObject_classMethod, "@@:");
// Build and register the _NSResurrectedObject class at late-init time.
750 static void _NSResurrectedObject_initialize() {
751 _NSResurrectedObjectMap = NXCreateMapTable(NXPtrValueMapPrototype, 128);
752 _NSResurrectedObjectClass = objc_allocateClassPair(objc_getClass("NSObject"), "_NSResurrectedObject", 0);
753 class_addMethod(_NSResurrectedObjectClass, @selector(finalize), (IMP)_NSResurrectedObject_finalize, "v@:");
754 Class metaClass = _NSResurrectedObjectClass->ISA();
755 class_addMethod(metaClass, @selector(resolveInstanceMethod:), (IMP)_NSResurrectedObject_resolveInstanceMethod, "c@::");
756 class_addMethod(metaClass, @selector(resolveClassMethod:), (IMP)_NSResurrectedObject_resolveClassMethod, "c@::");
757 objc_registerClassPair(_NSResurrectedObjectClass);
// libauto's resurrect callback: swap the zombie's class for
// _NSResurrectedObject (once), keeping it pitifully alive but inert.
760 static void resurrectZombie(auto_zone_t *zone, void *ptr) {
761 id object = (id) ptr;
762 Class cls = object->ISA();
763 if (cls != _NSResurrectedObjectClass) {
764 // remember the original class for this instance.
765 pthread_mutex_lock(&_NSResurrectedObjectLock);
766 NXMapInsert(_NSResurrectedObjectMap, ptr, cls);
767 pthread_mutex_unlock(&_NSResurrectedObjectLock);
768 object_setClass(object, _NSResurrectedObjectClass);
772 /***********************************************************************
773 * Pretty printing support
774 * For development purposes.
775 **********************************************************************/
// Heap-introspection callbacks handed to libauto in gc_zone_init().
778 static char *name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset, int withRetainCount);
// Name an arbitrary heap address for heap dumps (no retain count).
780 static char* objc_name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset)
782 return name_for_address(zone, base, offset, false);
// Name an object by its class, but only if the first word really is a
// registered class pointer — protects against scanning non-objects.
785 static const char* objc_name_for_object(auto_zone_t *zone, void *object) {
786 Class cls = *(Class *)object;
787 if (!objc_isRegisteredClass(cls)) return "";
788 return class_getName(cls);
791 /***********************************************************************
793 **********************************************************************/
795 static BOOL objc_isRegisteredClass(Class candidate);
// Return the strong-ivar layout for an object, or nil if its isa is not
// a registered class. The volatile temporary forces a single read of the
// isa so a racing TLC recycle can't be observed twice inconsistently.
797 static const unsigned char *objc_layout_for_address(auto_zone_t *zone, void *address) {
798 id object = (id)address;
799 volatile void *clsptr = (void*)object->ISA();
800 Class cls = (Class)clsptr;
801 return objc_isRegisteredClass(cls) ? _object_getIvarLayout(cls, object) : nil;
// Same as above, for the weak-ivar layout.
804 static const unsigned char *objc_weak_layout_for_address(auto_zone_t *zone, void *address) {
805 id object = (id)address;
806 volatile void *clsptr = (void*)object->ISA();
807 Class cls = (Class)clsptr;
808 return objc_isRegisteredClass(cls) ? class_getWeakIvarLayout(cls) : nil;
// Tell libauto about loaded/unloaded data segments so globals there are
// scanned for roots.
811 void gc_register_datasegment(uintptr_t base, size_t size) {
812 auto_zone_register_datasegment(gc_zone, (void*)base, size);
815 void gc_unregister_datasegment(uintptr_t base, size_t size) {
816 auto_zone_unregister_datasegment(gc_zone, (void*)base, size);
820 /***********************************************************************
822 **********************************************************************/
// libauto heap-growth callback: if the heap wants to grow while a
// collection is already running, let it; otherwise kick off a coalesced
// ratio collection first to try to reclaim space instead of growing.
// NOTE(review): the branch framing (`} else {` and related lines) is
// elided in this extraction.
824 static void objc_will_grow(auto_zone_t *zone, auto_heap_growth_info_t info) {
825 if (auto_zone_is_collecting(gc_zone)) {
829 auto_zone_collect(gc_zone, AUTO_ZONE_COLLECT_COALESCE|AUTO_ZONE_COLLECT_RATIO_COLLECTION);
// Create and configure the collector zone (once per process).
// NOTE(review): the `result` declaration, the didOnce guard, and the
// return are on lines elided from this extraction.
834 static auto_zone_t *gc_zone_init(void)
837 static int didOnce = 0;
841 // initialize the batch finalization queue
842 MainThreadWorkQ.head = nil;
843 MainThreadWorkQ.tail = nil;
844 pthread_mutex_init(&MainThreadWorkQ.mutex, nil);
845 pthread_cond_init(&MainThreadWorkQ.condition, nil);
848 result = auto_zone_create("auto_zone");
850 auto_zone_disable_compaction(result);
852 auto_collection_control_t *control = auto_collection_parameters(result);
854 // set up the magic control parameters
855 control->batch_invalidate = BatchInvalidate;
856 control->will_grow = objc_will_grow;
857 control->resurrect = resurrectZombie;
858 control->layout_for_address = objc_layout_for_address;
859 control->weak_layout_for_address = objc_weak_layout_for_address;
860 control->name_for_address = objc_name_for_address;
// name_for_object was added later; only set it if libauto's control
// struct is new enough to carry the field.
862 if (control->version >= sizeof(auto_collection_control_t)) {
863 control->name_for_object = objc_name_for_object;
870 /* should be defined in /usr/local/include/libdispatch_private.h. */
// libdispatch hook points; assigned in gc_init() so dispatch worker
// threads register with the collector on start and reap TLC blocks on exit.
871 extern void (*dispatch_begin_thread_4GC)(void);
872 extern void (*dispatch_end_thread_4GC)(void);
// Flush this thread's thread-local-collector blocks back to the zone.
874 static void objc_reapThreadLocalBlocks()
876 if (UseGC) auto_zone_reap_all_local_blocks(gc_zone);
// Register / unregister the calling thread with the collector so its
// stack and registers are (or stop being) scanned. All are no-ops when
// GC is off.
879 void objc_registerThreadWithCollector()
881 if (UseGC) auto_zone_register_thread(gc_zone);
884 void objc_unregisterThreadWithCollector()
886 if (UseGC) auto_zone_unregister_thread(gc_zone);
// Debug aid: abort if the calling thread never registered.
889 void objc_assertRegisteredThreadWithCollector()
891 if (UseGC) auto_zone_assert_thread_registered(gc_zone);
894 // Always called by _objcInit, even if GC is off.
// Records the GC decision and, when GC is wanted, builds the zone and
// wires up libdispatch, the class side table, Blocks, and crash logging.
// NOTE(review): the `UseGC = wantsGC;` assignment, PRINT_GC gate, and the
// `if (!wantsGC) return;` early-out are on lines elided from this
// extraction — confirm against the unabridged file.
895 void gc_init(BOOL wantsGC)
901 _objc_inform("GC: is %s", wantsGC ? "ON" : "OFF");
905 // Set up the GC zone
906 gc_zone = gc_zone_init();
908 // tell libdispatch to register its threads with the GC.
909 dispatch_begin_thread_4GC = objc_registerThreadWithCollector;
910 dispatch_end_thread_4GC = objc_reapThreadLocalBlocks;
912 // set up the registered classes list
913 registeredClassTableInit();
915 // tell Blocks to use collectable memory. CF will cook up the classes separately.
// (The gc_block_init() call presumably sits on an elided line here.)
918 // Add GC state to crash log reports
919 _objc_inform_on_crash("garbage collection is ON");
924 // Called by NSObject +load to perform late GC setup
925 // This work must wait until after all of libSystem initializes.
// NOTE(review): the enclosing function's signature and UseGC guard are on
// lines elided from this extraction.
930 // create the _NSResurrectedObject class used to track resurrections.
931 _NSResurrectedObject_initialize();
933 // tell libauto to set up its dispatch queues
934 auto_collect_multithreaded(gc_zone);
937 // Called by Foundation.
938 // This function used to initialize NSObject stuff, but now does nothing.
// Kept only for binary compatibility; returns the GC zone (nil if GC off).
939 malloc_zone_t *objc_collect_init(int (*callback)(void) __unused)
941 return (malloc_zone_t *)gc_zone;
945 * Support routines for the Block implementation
949 // The Block runtime now needs to sometimes allocate a Block that is an Object - namely
950 // when it needs to have a finalizer which, for now, is only if there are C++ destructors
951 // in the helper function. Hence the isObject parameter.
952 // Under GC a -copy message should allocate a refcount 0 block, ergo the isOne parameter.
// Allocate a Block in the GC heap. `isObject` selects an AUTO_OBJECT
// (finalizable) allocation — needed when the Block has C++ destructors;
// `isOne` requests an initial refcount of 1 vs. 0 (GC -copy semantics).
953 static void *block_gc_alloc5(const unsigned long size, const bool isOne, const bool isObject) {
954 auto_memory_type_t type = isObject ? (AUTO_OBJECT|AUTO_MEMORY_SCANNED) : AUTO_MEMORY_SCANNED;
955 return auto_zone_allocate_object(gc_zone, size, type, isOne, false);
958 // The Blocks runtime keeps track of everything above 1 and so it only calls
959 // up to the collector to tell it about the 0->1 transition and then the 1->0 transition
960 static void block_gc_setHasRefcount(const void *block, const bool hasRefcount) {
// hasRefcount==true is the 0->1 transition (retain); false is 1->0
// (release). NOTE(review): the if/else framing is on elided lines.
962 auto_zone_retain(gc_zone, (void *)block);
964 auto_zone_release(gc_zone, (void *)block);
// Barrier-aware memmove used by the Blocks runtime when copying captures.
967 static void block_gc_memmove(void *dst, void *src, unsigned long size) {
968 auto_zone_write_barrier_memmove(gc_zone, dst, src, (size_t)size);
// Hand the GC callbacks above to the Blocks runtime (via
// _Block_use_GC and friends — the surrounding call lines are elided in
// this extraction).
971 static void gc_block_init(void) {
974 block_gc_setHasRefcount,
975 (void (*)(void *, void **))objc_assign_strongCast_gc,
976 (void (*)(const void *, void *))objc_assign_weak,
982 /***********************************************************************
984 * In addition to the global class hashtable (set) indexed by name, we
985 * also keep one based purely by pointer when running under Garbage Collection.
986 * This allows the background collector to race against objects recycled from TLC.
987 * Specifically, the background collector can read the admin byte and see that
988 * a thread local object is an object, get scheduled out, and the TLC recovers it,
989 * linking it into the cache, then the background collector reads the isa field and
990 * finds linkage info. By qualifying all isa fields read we avoid this.
991 **********************************************************************/
993 // This is a self-contained hash table of all classes. The first two elements contain the (size-1) and count.
// Open-addressed pointer hash set of all registered classes.
// Layout: slot 0 = capacity mask (size-1, size a power of two),
// slot 1 = count, slots 2.. = class pointers.
994 static volatile Class *AllClasses = nil;
997 #define INITIALSIZE 512
1000 // Allocate the side table.
// NOTE(review): the enclosing `if (UseGC)` guard and the count
// initialization (`table[1] = 0;`) are presumably on elided lines.
1001 static void registeredClassTableInit() {
1003 // allocate a collectable (refcount 0) zeroed hunk of unscanned memory
1004 uintptr_t *table = (uintptr_t *)auto_zone_allocate_object(gc_zone, INITIALSIZE*sizeof(void *), AUTO_MEMORY_UNSCANNED, true, true);
1005 // set initial capacity (as mask)
1006 table[0] = INITIALSIZE - 1;
1007 // set initial count
// Publish the table; readers take a local snapshot of this pointer.
1009 AllClasses = (Class *)table;
1012 // Verify that a particular pointer is to a class.
1013 // Safe from any thread anytime
// Lock-free membership probe. Readers snapshot AllClasses once; writers only
// publish fully-built replacement tables on grow, so a stale snapshot remains
// internally consistent.
1014 static BOOL objc_isRegisteredClass(Class candidate) {
1016 // nil is never a valid ISA.
1017 if (candidate == nil) return NO;
1018 // We don't care about a race with another thread adding a class to which we randomly might have a pointer
1019 // Get local copy of classes so that we're immune from updates.
1020 // We keep the size of the list as the first element so there is no race as the list & size get updated.
1021 uintptr_t *allClasses = (uintptr_t *)AllClasses;
1022 // Slot 0 is always the size of the list in log 2 masked terms (e.g. size - 1) where size is always power of 2
// Hash = pointer >> SHIFT (SHIFT defined elsewhere in this file), masked by
// the capacity mask in slot 0.
1024 uintptr_t slot = (((uintptr_t)candidate) >> SHIFT) & allClasses[0];
1025 // avoid slot 0 and 1
1026 if (slot < 2) slot = 2;
// Linear probe: matching slot -> registered; empty (0) slot -> absent.
// NOTE(review): the probe loop's header, slot increment, and return
// statements are elided in this excerpt.
1028 long int slotValue = allClasses[slot];
1029 if (slotValue == (long int)candidate) {
1032 if (slotValue == 0) {
// wrap the probe back to the first payload slot
1036 if (slot > allClasses[0])
1037 slot = 2; // skip size, count
1041 // Utility used when growing
1042 // Assumes lock held
// Insert candidate into a freshly-allocated table by linear probing. Only
// empty (0) slots need handling here: a grow-time rehash never encounters
// REMOVED tombstones or duplicates (the grow loop filters them out).
1043 static void addClassHelper(uintptr_t *table, uintptr_t candidate) {
1044 uintptr_t slot = (((long int)candidate) >> SHIFT) & table[0];
1045 if (slot < 2) slot = 2; // slots 0 and 1 are the capacity mask and count
1047 uintptr_t slotValue = table[slot];
1048 if (slotValue == 0) {
// claim the empty slot (probe loop header / return elided in this excerpt)
1049 table[slot] = candidate;
// wrap the probe back to the first payload slot
1054 if (slot > table[0])
1055 slot = 2; // skip size, count
1059 // lock held by callers
// Register a class pointer in the side table, doubling the table when
// occupancy crosses 50%. Writers hold the runtime lock; concurrent readers
// are lock-free and see either the old table or the fully-populated new one.
1060 void objc_addRegisteredClass(Class candidate) {
1062 uintptr_t *table = (uintptr_t *)AllClasses;
1063 // Slot 0 is always the size of the list in log 2 masked terms (e.g. size - 1) where size is always power of 2
1064 // Slot 1 is count - always non-zero
1065 uintptr_t slot = (((long int)candidate) >> SHIFT) & table[0];
1066 if (slot < 2) slot = 2;
// linear-probe body (loop header / slot increment elided in this excerpt):
1068 uintptr_t slotValue = table[slot];
// adding a class that is already present is a caller bug
1069 assert(slotValue != (uintptr_t)candidate);
1070 if (slotValue == REMOVED) {
// reuse a tombstone; the count stays as-is (removal never decremented it —
// see the comment in objc_removeRegisteredClass)
1071 table[slot] = (long)candidate;
1074 else if (slotValue == 0) {
1075 table[slot] = (long)candidate;
1076 if (2*++table[1] > table[0]) { // add to count; check if we cross 50% utilization
// Grow: allocate a zeroed table of twice the capacity and rehash live entries.
1078 uintptr_t oldSize = table[0]+1;
1079 uintptr_t *newTable = (uintptr_t *)auto_zone_allocate_object(gc_zone, oldSize*2*sizeof(void *), AUTO_MEMORY_UNSCANNED, true, true);
1081 newTable[0] = 2*oldSize - 1;
// NOTE(review): newTable[1] (the count) is not set in the visible lines; it
// is presumably re-established by a line elided in this excerpt — confirm.
1083 for (i = 2; i < oldSize; ++i) {
1084 if (table[i] && table[i] != REMOVED) // skip empties and tombstones
1085 addClassHelper(newTable, table[i]);
// publish the new table; readers holding the old pointer stay consistent
1087 AllClasses = (Class *)newTable;
1088 // let the old table be collected when other threads are no longer reading it.
1089 auto_zone_release(gc_zone, (void *)table);
// wrap the probe back to the first payload slot
1094 if (slot > table[0])
1095 slot = 2; // skip size, count
1099 // lock held by callers
// Unregister a class pointer: overwrite its slot with the REMOVED tombstone so
// later probes keep walking past it. The occupancy count is deliberately left
// unchanged (see the inline comment below).
1100 void objc_removeRegisteredClass(Class candidate) {
1102 uintptr_t *table = (uintptr_t *)AllClasses;
1103 // Slot 0 is always the size of the list in log 2 masked terms (e.g. size - 1) where size is always power of 2
1104 // Slot 1 is count - always non-zero
1105 uintptr_t slot = (((uintptr_t)candidate) >> SHIFT) & table[0];
1106 if (slot < 2) slot = 2;
// linear-probe body (loop header / slot increment elided in this excerpt):
1108 uintptr_t slotValue = table[slot];
1109 if (slotValue == (uintptr_t)candidate) {
1110 table[slot] = REMOVED; // if next slot == 0 we could set to 0 here and decr count
// hitting an empty slot means the class was never registered — caller bug
1113 assert(slotValue != 0);
// wrap the probe back to the first payload slot
1115 if (slot > table[0])
1116 slot = 2; // skip size, count
1121 /***********************************************************************
1122 * Debugging - support for smart printouts when errors occur
1123 **********************************************************************/
// Lazily-created private malloc zone for the debug strings built below,
// keeping diagnostic allocations out of the zones being inspected.
1126 static malloc_zone_t *objc_debug_zone(void)
1128 static malloc_zone_t *z = nil;
// NOTE(review): the NULL check guarding this one-time creation, and the
// return of z, are elided in this excerpt.
1130 z = malloc_create_zone(PAGE_MAX_SIZE, 0);
1131 malloc_set_zone_name(z, "objc-auto debug");
// Recursively writes the base-`base` digits of value starting at head,
// most-significant digit first. Does not NUL-terminate. NOTE(review): the
// return statement is elided in this excerpt — presumably it returns head+1
// (one past the digit just written); confirm against the full source.
1136 static char *_malloc_append_unsigned(uintptr_t value, unsigned base, char *head) {
// recurse on the high-order digits first so they land before this one
1140 if (value >= base) head = _malloc_append_unsigned(value / base, base, head);
1141 value = value % base;
// digits 0-9, then a... for bases above 10
1142 head[0] = (value < 10) ? '0' + value : 'a' + value - 10;
// Append the decimal rendering of value to str, strlcat-style. The guard
// drops the append unless ~30 bytes of headroom remain (ample for a 64-bit
// decimal). Note bufSize - strlen(str) is unsigned arithmetic: a str longer
// than bufSize would wrap huge rather than go negative, so callers must pass
// a NUL-terminated str that already fits within bufSize.
1147 static void strlcati(char *str, uintptr_t value, size_t bufSize)
// NOTE(review): the early return under this guard, and the trailing NUL
// termination after the append, are elided in this excerpt.
1149 if ( (bufSize - strlen(str)) < 30)
1151 str = _malloc_append_unsigned(value, 10, str + strlen(str));
// Best-effort reverse lookup: map a byte offset within an instance of cls to
// the Ivar that starts at, or most closely precedes, that offset. Superclass
// ivars are consulted first (they occupy the lower offsets); returns nil for
// a nil class or when nothing plausible is found.
1156 static Ivar ivar_for_offset(Class cls, vm_address_t offset)
1159 vm_address_t ivar_offset;
1160 Ivar super_ivar, result;
1162 unsigned int ivar_count;
1164 if (!cls) return nil;
1166 // scan base classes FIRST
1167 super_ivar = ivar_for_offset(cls->superclass, offset);
1168 // result is best-effort; our ivars may be closer
1170 ivars = class_copyIvarList(cls, &ivar_count);
1171 if (ivars && ivar_count) {
1172 // Try our first ivar. If it's too big, use super's best ivar.
1173 // (lose 64-bit precision)
1174 ivar_offset = ivar_getOffset(ivars[0]);
1175 if (ivar_offset > offset) result = super_ivar;
1176 else if (ivar_offset == offset) result = ivars[0];
1179 // Try our other ivars. If any is too big, use the previous.
1180 for (i = 1; result == nil && i < ivar_count; i++) {
1181 ivar_offset = ivar_getOffset(ivars[i]);
1182 if (ivar_offset == offset) {
// exact hit on this ivar (the assignment is elided in this excerpt)
1184 } else if (ivar_offset > offset) {
// overshot: offset lies inside the previous ivar
1185 result = ivars[i - 1];
1189 // Found nothing. Return our last ivar.
1191 result = ivars[ivar_count - 1];
// NOTE(review): the free() of the class_copyIvarList() buffer and the
// fallback-to-super branch structure are partially elided in this excerpt.
1195 result = super_ivar;
// Append a human-readable ".ivarName[+delta]" suffix to buf describing the
// given byte offset within an instance of cls (strlcat-style, bounded by
// bufSize). Offset 0 (the isa) produces no output.
1201 static void append_ivar_at_offset(char *buf, Class cls, vm_address_t offset, size_t bufSize)
1205 if (offset == 0) return; // don't bother with isa
1206 if (offset >= class_getInstanceSize(cls)) {
// past the declared instance size: annotate as trailing "extra" bytes
1207 strlcat(buf, ".<extra>+", bufSize);
1208 strlcati(buf, offset, bufSize);
1212 ivar = ivar_for_offset(cls, offset);
// no matching ivar (the guard/return around this branch is elided here)
1214 strlcat(buf, ".<?>", bufSize);
1218 // fixme doesn't handle structs etc.
1220 strlcat(buf, ".", bufSize);
1221 const char *ivar_name = ivar_getName(ivar);
1222 if (ivar_name) strlcat(buf, ivar_name, bufSize);
1223 else strlcat(buf, "<anonymous ivar>", bufSize);
// remaining delta into the ivar; appended as "+delta" when nonzero
// (the zero check is elided in this excerpt)
1225 offset -= ivar_getOffset(ivar);
1227 strlcat(buf, "+", bufSize);
1228 strlcati(buf, offset, bufSize);
// Best-effort CF class name for a CF object without linking CoreFoundation:
// dlopen() the already-loaded CF (RTLD_NOLOAD) and resolve CFGetTypeID /
// _CFRuntimeGetClassWithTypeID at runtime. Returns "anonymous_NSCFType" when
// CF is absent or the lookups fail.
1233 static const char *cf_class_for_object(void *cfobj)
1235 // ick - we don't link against CF anymore
// Partial mirror of CF's private CFRuntimeClass layout; only className is
// read (leading members may be elided in this excerpt).
1237 struct fake_cfclass {
1239 const char *className;
1240 // don't care about the rest
1245 size_t (*CFGetTypeID)(void *);
1246 fake_cfclass * (*_CFRuntimeGetClassWithTypeID)(size_t);
1248 result = "anonymous_NSCFType";
// RTLD_NOLOAD: never load CF on our behalf — only find it if already mapped
1250 dlh = dlopen("/System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation", RTLD_LAZY | RTLD_NOLOAD | RTLD_FIRST);
1251 if (!dlh) return result;
1253 CFGetTypeID = (size_t(*)(void*)) dlsym(dlh, "CFGetTypeID");
1254 _CFRuntimeGetClassWithTypeID = (fake_cfclass*(*)(size_t)) dlsym(dlh, "_CFRuntimeGetClassWithTypeID");
1256 if (CFGetTypeID && _CFRuntimeGetClassWithTypeID) {
1257 size_t cfid = (*CFGetTypeID)(cfobj);
// chase the registered class record for this type ID and read its name
1258 result = (*_CFRuntimeGetClassWithTypeID)(cfid)->className;
1266 static char *name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset, int withRetainCount)
1268 #define APPEND_SIZE(s) \
1269 strlcat(buf, "[", sizeof(buf)); \
1270 strlcati(buf, s, sizeof(buf)); \
1271 strlcat(buf, "]", sizeof(buf));
1279 auto_zone_size(zone, (void *)base);
1280 auto_memory_type_t type = size ?
1281 auto_zone_get_layout_type(zone, (void *)base) : AUTO_TYPE_UNKNOWN;
1282 unsigned int refcount = size ?
1283 auto_zone_retain_count(zone, (void *)base) : 0;
1286 case AUTO_OBJECT_SCANNED:
1287 case AUTO_OBJECT_UNSCANNED:
1288 case AUTO_OBJECT_ALL_POINTERS: {
1289 const char *class_name = object_getClassName((id)base);
1290 if ((0 == strcmp(class_name, "__NSCFType")) || (0 == strcmp(class_name, "NSCFType"))) {
1291 strlcat(buf, cf_class_for_object((void *)base), sizeof(buf));
1293 strlcat(buf, class_name, sizeof(buf));
1296 append_ivar_at_offset(buf, ((id)base)->ISA(), offset, sizeof(buf));
1301 case AUTO_MEMORY_SCANNED:
1302 strlcat(buf, "{conservative-block}", sizeof(buf));
1305 case AUTO_MEMORY_UNSCANNED:
1306 strlcat(buf, "{no-pointers-block}", sizeof(buf));
1309 case AUTO_MEMORY_ALL_POINTERS:
1310 strlcat(buf, "{all-pointers-block}", sizeof(buf));
1313 case AUTO_MEMORY_ALL_WEAK_POINTERS:
1314 strlcat(buf, "{all-weak-pointers-block}", sizeof(buf));
1317 case AUTO_TYPE_UNKNOWN:
1318 strlcat(buf, "{uncollectable-memory}", sizeof(buf));
1321 strlcat(buf, "{unknown-memory-type}", sizeof(buf));
1324 if (withRetainCount && refcount > 0) {
1325 strlcat(buf, " [[refcount=", sizeof(buf));
1326 strlcati(buf, refcount, sizeof(buf));
1327 strlcat(buf, "]]", sizeof(buf));
1330 size_t len = 1 + strlen(buf);
1331 result = (char *)malloc_zone_malloc(objc_debug_zone(), len);
1332 memcpy(result, buf, len);