2 * Copyright (c) 2004-2007 Apple Inc. All rights reserved.
4 * @APPLE_LICENSE_HEADER_START@
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
21 * @APPLE_LICENSE_HEADER_END@
28 #import <mach-o/dyld.h>
31 #import <libkern/OSAtomic.h>
34 #import "objc-private.h"
// Forward declaration: creates and configures the collected (GC) malloc zone.
41 static auto_zone_t *gc_zone_init(void);
// YES when this process runs with garbage collection enabled.
// NOBSS presumably keeps it out of __bss so it is patchable early -- TODO confirm.
44 __private_extern__ BOOL UseGC NOBSS = NO;
// NOTE(review): never written in the visible code; likely toggled in dropped lines.
45 static BOOL RecordAllocations = NO;
// YES once the dedicated collector thread exists (see objc_startCollectorThread).
46 static BOOL MultiThreadedGC = NO;
// YES once any class registered for main-thread-only finalization.
47 static BOOL WantsMainThreadFinalization = NO;
// Set during a batch finalize when main-thread-only objects were skipped.
48 static BOOL NeedsMainThreadFinalization = NO;
// NOTE(review): the six fields below read like members of the BatchFinalizeBlock
// control structure used by batchFinalizeOnMainThread/batchFinalizeOnTwoThreads;
// the enclosing struct/variable declaration appears lost in extraction (numbering
// gaps at 49-50, 53, 58-60) -- restore from objc4 before compiling.
51 auto_zone_foreach_object_t foreach;
52 auto_zone_cursor_t cursor;
54 volatile BOOL finished;
55 volatile BOOL started;
56 pthread_mutex_t mutex;
57 pthread_cond_t condition;
// The collected zone itself; NULL while GC is off.
61 __private_extern__ auto_zone_t *gc_zone = NULL;
63 // Pointer magic to make dyld happy. See notes in objc-private.h
64 __private_extern__ id (*objc_assign_ivar_internal)(id, id, ptrdiff_t) = objc_assign_ivar;
67 /***********************************************************************
69 * Called by various libraries.
70 **********************************************************************/
// Set the allocation threshold that triggers a collection. Old naming.
// No-op unless GC is on. (Reconstructed: the `if (UseGC)` guards and closing
// braces were dropped by extraction; internal numbering gaps confirm this.)
OBJC_EXPORT void objc_set_collection_threshold(size_t threshold) { // Old naming
    if (UseGC) {
        auto_collection_parameters(gc_zone)->collection_threshold = threshold;
    }
}

// Set the allocation threshold that triggers a collection.
OBJC_EXPORT void objc_setCollectionThreshold(size_t threshold) {
    if (UseGC) {
        auto_collection_parameters(gc_zone)->collection_threshold = threshold;
    }
}

// Set how many generational collections run per full collection.
void objc_setCollectionRatio(size_t ratio) {
    if (UseGC) {
        auto_collection_parameters(gc_zone)->full_vs_gen_frequency = ratio;
    }
}

// Old naming of objc_setCollectionRatio.
void objc_set_collection_ratio(size_t ratio) {  // old naming
    if (UseGC) {
        auto_collection_parameters(gc_zone)->full_vs_gen_frequency = ratio;
    }
}

// Mark cls so its instances are finalized only on the main thread.
void objc_finalizeOnMainThread(Class cls) {
    if (UseGC) {
        WantsMainThreadFinalization = YES;
        _class_setFinalizeOnMainThread(cls);
    }
}
// Start the dedicated background collector thread. Idempotent; no-op when
// GC is off. (Reconstructed guard: numbering gaps at 106-108 and 116-118
// indicate the dropped `if (UseGC && !didOnce)` wrapper and braces.)
void objc_startCollectorThread(void) {
    static int didOnce = 0;
    if (UseGC && !didOnce) {
        didOnce = 1;
        // pretend we're done to start out with.
        BatchFinalizeBlock.started = YES;
        BatchFinalizeBlock.finished = YES;
        pthread_mutex_init(&BatchFinalizeBlock.mutex, NULL);
        pthread_cond_init(&BatchFinalizeBlock.condition, NULL);
        auto_collect_multithreaded(gc_zone);
        MultiThreadedGC = YES;
    }
}

// Old naming of objc_startCollectorThread.
void objc_start_collector_thread(void) {
    objc_startCollectorThread();
}
123 static void batchFinalizeOnMainThread(void);
// Request a garbage collection. On the main thread (or with a collector
// thread running) this calls auto_collect directly; otherwise the request
// is forwarded to the main thread via NSGarbageCollector.
// NOTE(review): numbering gaps (126, 128, 137, 141-142, 144-147) show dropped
// lines: the `if (UseGC) return;`-style guard, a `default:` case, closing
// braces, and the `else` introducing the main-thread forward at line 143.
125 void objc_collect(unsigned long options) {
127 BOOL onMainThread = pthread_main_np() ? YES : NO;
129 if (MultiThreadedGC || onMainThread) {
// A main-thread caller first drains any pending main-thread-only finalization.
130 if (MultiThreadedGC && onMainThread) batchFinalizeOnMainThread();
131 auto_collection_mode_t amode = AUTO_COLLECT_RATIO_COLLECTION;
// Low two bits of options select the collection flavor.
132 switch (options & 0x3) {
133 case OBJC_RATIO_COLLECTION: amode = AUTO_COLLECT_RATIO_COLLECTION; break;
134 case OBJC_GENERATIONAL_COLLECTION: amode = AUTO_COLLECT_GENERATIONAL_COLLECTION; break;
135 case OBJC_FULL_COLLECTION: amode = AUTO_COLLECT_FULL_COLLECTION; break;
136 case OBJC_EXHAUSTIVE_COLLECTION: amode = AUTO_COLLECT_EXHAUSTIVE_COLLECTION; break;
138 if (options & OBJC_COLLECT_IF_NEEDED) amode |= AUTO_COLLECT_IF_NEEDED;
139 if (options & OBJC_WAIT_UNTIL_DONE) amode |= AUTO_COLLECT_SYNCHRONOUS; // uses different bits
140 auto_collect(gc_zone, amode, NULL);
// Not on the main thread and no collector thread: bounce the request over.
143 objc_msgSend(objc_getClass("NSGarbageCollector"), @selector(_callOnMainThread:withArgs:), objc_collect, (void *)options);
148 // 0 - exhaustively NSGarbageCollector.m
149 // - from AppKit /Developer/Applications/Xcode.app/Contents/MacOS/Xcode via idleTimer
151 // - from autoreleasepool
152 // - several other places
// Conditional collection used by autorelease pools and idle timers; same
// main-thread routing logic as objc_collect above.
153 void objc_collect_if_needed(unsigned long options) {
155 BOOL onMainThread = pthread_main_np() ? YES : NO;
157 if (MultiThreadedGC || onMainThread) {
158 auto_collection_mode_t mode;
159 if (options & OBJC_GENERATIONAL) {
160 mode = AUTO_COLLECT_IF_NEEDED | AUTO_COLLECT_RATIO_COLLECTION;
// else-branch (numbering gap at 161-162): fall back to exhaustive collection.
163 mode = AUTO_COLLECT_EXHAUSTIVE_COLLECTION;
165 if (MultiThreadedGC && onMainThread) batchFinalizeOnMainThread();
166 auto_collect(gc_zone, mode, NULL);
168 else { // XXX could be optimized (e.g. ask auto for threshold check, if so, set ASKING if not already ASKING,...
169 objc_msgSend(objc_getClass("NSGarbageCollector"), @selector(_callOnMainThread:withArgs:), objc_collect_if_needed, (void *)options);
// Number of blocks currently allocated in the collected zone.
size_t objc_numberAllocated(void)
{
    auto_statistics_t stats;
    auto_zone_statistics(gc_zone, &stats);
    return stats.malloc_statistics.blocks_in_use;
}

// USED BY CF & ONE OTHER
// YES iff GC is on and `object` points into the collected zone.
BOOL objc_isAuto(id object)
{
    return UseGC && auto_zone_is_valid_pointer(gc_zone, object) != 0;
}

// YES iff the process is running with garbage collection enabled.
BOOL objc_collectingEnabled(void)
{
    return UseGC;
}

// Old naming of objc_collectingEnabled.
BOOL objc_collecting_enabled(void) // Old naming
{
    return UseGC;
}
199 /***********************************************************************
201 * Called by CF and Foundation.
202 **********************************************************************/
// Allocate an object in the GC zone, with the given number of extra bytes.
// Thin wrapper kept for CF/Foundation; class_createInstance already allocates
// from the collected zone when GC is on.
id objc_allocate_object(Class cls, int extra)
{
    return class_createInstance(cls, extra);
}
211 /***********************************************************************
212 * Write barrier implementations, optimized for when GC is known to be on
213 * Called by the write barrier exports only.
214 * These implementations assume GC is on. The exported function must
215 * either perform the check itself or be conditionally stomped at
217 **********************************************************************/
// Store with a write barrier; when slot is outside the collected zone
// (e.g. a global or stack location) fall back to the root write barrier.
static void objc_strongCast_write_barrier(id value, id *slot) {
    if (!auto_zone_set_write_barrier(gc_zone, (void*)slot, value)) {
        auto_zone_root_write_barrier(gc_zone, slot, value);
    }
}

// GC-on implementation of objc_assign_strongCast.
__private_extern__ id objc_assign_strongCast_gc(id value, id *slot)
{
    objc_strongCast_write_barrier(value, slot);
    return (*slot = value);
}

// Register slot as an explicit GC root holding value. Complains (and calls
// the debugger-breakpoint hook) if value was already finalized.
static void objc_register_global(id value, id *slot)
{
    // use explicit root registration.
    if (value && auto_zone_is_valid_pointer(gc_zone, value)) {
        if (auto_zone_is_finalized(gc_zone, value)) {
            __private_extern__ void objc_assign_global_error(id value, id *slot);

            _objc_inform("GC: storing an already collected object %p into global memory at %p, break on objc_assign_global_error to debug\n", value, slot);
            objc_assign_global_error(value, slot);
        }
        auto_zone_add_root(gc_zone, slot, value);
    }
}

// GC-on implementation of objc_assign_global.
__private_extern__ id objc_assign_global_gc(id value, id *slot) {
    objc_register_global(value, slot);
    return (*slot = value);
}

// GC-on implementation of objc_assign_ivar: store value into the ivar of
// base at the given byte offset, with a write barrier. Complains via the
// breakpoint hook when base is not a collected block.
__private_extern__ id objc_assign_ivar_gc(id value, id base, ptrdiff_t offset)
{
    id *slot = (id*) ((char *)base + offset);

    if (value) {
        if (!auto_zone_set_write_barrier(gc_zone, (char *)base + offset, value)) {
            __private_extern__ void objc_assign_ivar_error(id base, ptrdiff_t offset);

            _objc_inform("GC: %p + %d isn't in the auto_zone, break on objc_assign_ivar_error to debug.\n", base, offset);
            objc_assign_ivar_error(base, offset);
        }
    }

    return (*slot = value);
}
268 /***********************************************************************
269 * Write barrier exports
270 * Called by pretty much all GC-supporting code.
272 * These "generic" implementations, available in PPC, are thought to be
273 * called by Rosetta when it translates the bla instruction.
274 **********************************************************************/
276 // Platform-independent write barriers
277 // These contain the UseGC check that the platform-specific
278 // runtime-rewritten implementations do not.
// UseGC-checked strong-cast write barrier: barriered store under GC,
// plain store otherwise.
id objc_assign_strongCast_generic(id value, id *dest)
{
    if (UseGC) {
        return objc_assign_strongCast_gc(value, dest);
    } else {
        return (*dest = value);
    }
}

// UseGC-checked global write barrier.
id objc_assign_global_generic(id value, id *dest)
{
    if (UseGC) {
        return objc_assign_global_gc(value, dest);
    } else {
        return (*dest = value);
    }
}

// UseGC-checked ivar write barrier.
id objc_assign_ivar_generic(id value, id dest, ptrdiff_t offset)
{
    if (UseGC) {
        return objc_assign_ivar_gc(value, dest, offset);
    } else {
        id *slot = (id*) ((char *)dest + offset);
        return (*slot = value);
    }
}
#if defined(__ppc__) || defined(__i386__) || defined(__x86_64__)

// PPC write barriers are in objc-auto-ppc.s
// write_barrier_init conditionally stomps those to jump to the _impl versions.

// These 3 functions are defined in objc-auto-i386.s and objc-auto-x86_64.s as
// the non-GC variants. Under GC, rtp_init stomps them with jumps to
// the GC implementations.

#else

// use generic implementation until time can be spent on optimizations
id objc_assign_strongCast(id value, id *dest) { return objc_assign_strongCast_generic(value, dest); }
id objc_assign_global(id value, id *dest) { return objc_assign_global_generic(value, dest); }
id objc_assign_ivar(id value, id dest, ptrdiff_t offset) { return objc_assign_ivar_generic(value, dest, offset); }

// not (defined(__ppc__)) && not defined(__i386__) && not defined(__x86_64__)
#endif
// memmove that applies write barriers when the destination may be a
// scanned block in the collected zone; plain memmove when GC is off.
void *objc_memmove_collectable(void *dst, const void *src, size_t size)
{
    if (UseGC) {
        return auto_zone_write_barrier_memmove(gc_zone, dst, src, size);
    } else {
        return memmove(dst, src, size);
    }
}
// Atomic compare-and-swap into a global slot; registers the replacement as
// a GC root first. Barrier-free variant.
BOOL objc_atomicCompareAndSwapGlobal(id predicate, id replacement, volatile id *objectLocation) {
    if (UseGC) objc_register_global(replacement, (id *)objectLocation);
    return OSAtomicCompareAndSwapPtr((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
}

// As above, with a full memory barrier.
BOOL objc_atomicCompareAndSwapGlobalBarrier(id predicate, id replacement, volatile id *objectLocation) {
    if (UseGC) objc_register_global(replacement, (id *)objectLocation);
    return OSAtomicCompareAndSwapPtrBarrier((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
}

// Atomic compare-and-swap into an instance-variable slot; applies the GC
// write barrier first. Barrier-free variant.
BOOL objc_atomicCompareAndSwapInstanceVariable(id predicate, id replacement, volatile id *objectLocation) {
    if (UseGC) objc_strongCast_write_barrier(replacement, (id *)objectLocation);
    return OSAtomicCompareAndSwapPtr((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
}

// As above, with a full memory barrier.
BOOL objc_atomicCompareAndSwapInstanceVariableBarrier(id predicate, id replacement, volatile id *objectLocation) {
    if (UseGC) objc_strongCast_write_barrier(replacement, (id *)objectLocation);
    return OSAtomicCompareAndSwapPtrBarrier((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
}
360 /***********************************************************************
362 **********************************************************************/
// Read a weak reference through auto, so a collected referent reads as nil.
// Plain read when GC is off.
id objc_read_weak(id *location) {
    id result = *location;
    if (UseGC && result) {
        result = auto_read_weak_reference(gc_zone, (void **)location);
    }
    return result;
}

// Store a weak reference: registered with auto under GC, a plain store
// otherwise. Returns the stored value.
id objc_assign_weak(id value, id *location) {
    if (UseGC) {
        auto_assign_weak_reference(gc_zone, value, (void **)location, NULL);
    } else {
        *location = value;
    }
    return value;
}

/***********************************************************************
* Testing tools
* Used to isolate resurrection of garbage objects during finalization.
**********************************************************************/
// YES iff ptr is a collected block currently being finalized.
BOOL objc_is_finalized(void *ptr) {
    if (ptr != NULL && UseGC) {
        return auto_zone_is_finalized(gc_zone, ptr);
    }
    return NO;
}
395 /***********************************************************************
397 * Used to tell clean up dirty stack frames before a thread blocks. To
398 * make this more efficient, we really need better support from pthreads.
399 * See <rdar://problem/4548631> for more details.
400 **********************************************************************/
// Find the lowest currently-resident page of this thread's stack, using
// mincore() page residency as a deepest-touch high-water mark.
// NOTE(review): numbering gaps (411, 416-419) show dropped lines: the loop
// index declaration (`size_t i;` or similar), probably a `break`, and
// closing braces. Restore before compiling.
402 static vm_address_t _stack_resident_base() {
403 pthread_t self = pthread_self();
404 size_t stack_size = pthread_get_stacksize_np(self);
// pthread_get_stackaddr_np returns the TOP of the stack; base = top - size.
405 vm_address_t stack_base = (vm_address_t)pthread_get_stackaddr_np(self) - stack_size;
406 size_t stack_page_count = stack_size / vm_page_size;
// One residency byte per page (VLA).
407 char stack_residency[stack_page_count];
408 vm_address_t stack_resident_base = 0;
409 if (mincore((void*)stack_base, stack_size, stack_residency) == 0) {
410 // we can now tell the degree to which the stack is resident, and use it as our ultimate high water mark.
412 for (i = 0; i < stack_page_count; ++i) {
413 if (stack_residency[i]) {
414 stack_resident_base = stack_base + i * vm_page_size;
415 // malloc_printf("last touched page = %lu\n", stack_page_count - i - 1);
// Returns 0 when mincore failed or no page is resident.
420 return stack_resident_base;
// Return the caller's frame address as an approximation of the stack pointer.
// noinline so the frame examined is a real, distinct frame.
// NOTE(review): the #else branch and #endif (lines 426-430) were dropped.
423 static __attribute__((noinline)) void* _get_stack_pointer() {
424 #if defined(__i386__) || defined(__ppc__) || defined(__ppc64__) || defined(__x86_64__)
425 return __builtin_frame_address(0);
// Zero out dead stack memory so the conservative scanner stops seeing stale
// object pointers there. Two modes: resident-pages-only, or the whole
// unused stack region.
// NOTE(review): gaps (432, 438, 445-447) indicate a dropped `if (UseGC)`
// guard, the `else`/`else if` between the two modes, and closing braces.
431 void objc_clear_stack(unsigned long options) {
433 if (options & OBJC_CLEAR_RESIDENT_STACK) {
434 // clear just the pages of stack that are currently resident.
435 vm_address_t stack_resident_base = _stack_resident_base();
// Leave a two-pointer safety margin below the current stack pointer.
436 vm_address_t stack_top = (vm_address_t)_get_stack_pointer() - 2 * sizeof(void*);
437 bzero((void*)stack_resident_base, (stack_top - stack_resident_base));
439 // clear the entire unused stack, regardless of whether it's pages are resident or not.
440 pthread_t self = pthread_self();
441 size_t stack_size = pthread_get_stacksize_np(self);
442 vm_address_t stack_base = (vm_address_t)pthread_get_stackaddr_np(self) - stack_size;
443 vm_address_t stack_top = (vm_address_t)_get_stack_pointer() - 2 * sizeof(void*);
444 bzero((void*)stack_base, stack_top - stack_base);
448 /***********************************************************************
449 * CF-only write barrier exports
451 * The gc_zone guards are not thought to be necessary
452 **********************************************************************/
// Exported as very private SPI to Foundation to tell CF about ivar stores.
// Stores value into *slot (an ivar of base), applying a write barrier when
// base is a collected block. Safe to call with GC off (gc_zone is NULL).
void* objc_assign_ivar_address_CF(void *value, void *base, void **slot)
{
    if (value && gc_zone) {
        if (auto_zone_is_valid_pointer(gc_zone, base)) {
            ptrdiff_t offset = (((char *)slot)-(char *)base);
            auto_zone_write_barrier(gc_zone, base, offset, value);
        }
    }

    return (*slot = value);
}

// Same as objc_assign_strongCast_gc, should tell Foundation to use _gc version instead
// exported as very private SPI to Foundation to tell CF about
// Derives the enclosing block from the slot address so interior pointers work.
void* objc_assign_strongCast_CF(void* value, void **slot)
{
    if (value && gc_zone) {
        void *base = (void *)auto_zone_base_pointer(gc_zone, (void*)slot);
        if (base) {
            ptrdiff_t offset = (((char *)slot)-(char *)base);
            auto_zone_write_barrier(gc_zone, base, offset, value);
        }
    }

    return (*slot = value);
}
483 /***********************************************************************
484 * Finalization support
485 **********************************************************************/
// -[NSObject finalize] IMP; _objc_msgForward until Foundation loads and
// objc_collect_init looks up the real implementation.
static IMP _NSObject_finalize = NULL;

// Finalizer crash debugging
// Object currently being finalized and its class name, published so they
// appear in crash reports.
static void *finalizing_object;
static const char *__crashreporter_info__;

// Finalize one object: send -finalize, then run C++ destructors. The object
// is published for crash diagnostics for the duration.
static void finalizeOneObject(void *obj, void *sel) {
    id object = obj;
    SEL selector = (SEL)sel;
    finalizing_object = obj;
    __crashreporter_info__ = object_getClassName(obj);

    /// call -finalize method.
    objc_msgSend(object, selector);
    // Call C++ destructors, if any.
    object_cxxDestruct(object);

    finalizing_object = NULL;
    __crashreporter_info__ = NULL;
}
// Finalize obj only if its class demands main-thread finalization.
// (Reconstructed: the `id object = obj;` declaration, NULL-isa check braces,
// and closing braces were dropped; numbering gaps 509, 511, 513, 516-518.)
static void finalizeOneMainThreadOnlyObject(void *obj, void *sel) {
    id object = obj;
    Class cls = object->isa;
    if (cls == NULL) {
        _objc_fatal("object with NULL ISA passed to finalizeOneMainThreadOnlyObject: %p\n", obj);
    }
    if (_class_shouldFinalizeOnMainThread(cls)) {
        finalizeOneObject(obj, sel);
    }
}

// Finalize obj unless its class demands main-thread finalization; in that
// case record that a main-thread pass is still required.
static void finalizeOneAnywhereObject(void *obj, void *sel) {
    id object = obj;
    Class cls = object->isa;
    if (cls == NULL) {
        _objc_fatal("object with NULL ISA passed to finalizeOneAnywhereObject: %p\n", obj);
    }
    if (!_class_shouldFinalizeOnMainThread(cls)) {
        finalizeOneObject(obj, sel);
    }
    else {
        NeedsMainThreadFinalization = YES;
    }
}
// Drive the auto cursor across a batch of garbage, applying `finalize` to
// each object. If a finalizer throws, log it and restart from the cursor's
// current position so the remaining objects are still finalized.
// (Reconstructed: the `size_t cursor_size` parameter -- used by every
// caller -- plus the retry loop and @try braces were dropped.)
static void batchFinalize(auto_zone_t *zone,
                          auto_zone_foreach_object_t foreach,
                          auto_zone_cursor_t cursor,
                          size_t cursor_size,
                          void (*finalize)(void *, void*))
{
    for (;;) {
        @try {
            foreach(cursor, finalize, @selector(finalize));
            // non-exceptional return means finalization is complete.
            break;
        } @catch (id exception) {
            // whoops, note exception, then restart at cursor's position
            __private_extern__ void objc_exception_during_finalize_error(void);
            _objc_inform("GC: -finalize resulted in an exception (%p) being thrown, break on objc_exception_during_finalize_error to debug\n\t%s", exception, (const char*)[[exception description] UTF8String]);
            objc_exception_during_finalize_error();
        }
    }
}
// Main-thread half of two-thread finalization: claim the queued block of
// main-thread-only objects, finalize them, and signal the collector thread.
// Returns immediately if another caller already started this batch.
static void batchFinalizeOnMainThread(void) {
    pthread_mutex_lock(&BatchFinalizeBlock.mutex);
    if (BatchFinalizeBlock.started) {
        // main thread got here already
        pthread_mutex_unlock(&BatchFinalizeBlock.mutex);
        return;
    }
    BatchFinalizeBlock.started = YES;
    pthread_mutex_unlock(&BatchFinalizeBlock.mutex);

    batchFinalize(gc_zone, BatchFinalizeBlock.foreach, BatchFinalizeBlock.cursor, BatchFinalizeBlock.cursor_size, finalizeOneMainThreadOnlyObject);
    // signal the collector thread that finalization has finished.
    pthread_mutex_lock(&BatchFinalizeBlock.mutex);
    BatchFinalizeBlock.finished = YES;
    pthread_cond_signal(&BatchFinalizeBlock.condition);
    pthread_mutex_unlock(&BatchFinalizeBlock.mutex);
}
// Collector-thread side of split finalization: finalize everything legal on
// this thread, then, if main-thread-only objects remain, hand the cursor to
// the main thread and block until it reports completion.
// (Reconstructed: the `size_t cursor_size` parameter and closing brace were
// dropped by extraction.)
static void batchFinalizeOnTwoThreads(auto_zone_t *zone,
                                      auto_zone_foreach_object_t foreach,
                                      auto_zone_cursor_t cursor,
                                      size_t cursor_size)
{
    // First, lets get rid of everything we can on this thread, then ask main thread to help if needed
    NeedsMainThreadFinalization = NO;
    // Work on a copy so the original cursor can be replayed by the main thread.
    char cursor_copy[cursor_size];
    memcpy(cursor_copy, cursor, cursor_size);
    batchFinalize(zone, foreach, cursor_copy, cursor_size, finalizeOneAnywhereObject);

    if (! NeedsMainThreadFinalization)
        return; // no help needed

    // set up the control block. Either our ping of main thread with _callOnMainThread will get to it, or
    // an objc_collect_if_needed() will get to it. Either way, this block will be processed on the main thread.
    pthread_mutex_lock(&BatchFinalizeBlock.mutex);
    BatchFinalizeBlock.foreach = foreach;
    BatchFinalizeBlock.cursor = cursor;
    BatchFinalizeBlock.cursor_size = cursor_size;
    BatchFinalizeBlock.started = NO;
    BatchFinalizeBlock.finished = NO;
    pthread_mutex_unlock(&BatchFinalizeBlock.mutex);

    //printf("----->asking main thread to finalize\n");
    objc_msgSend(objc_getClass("NSGarbageCollector"), @selector(_callOnMainThread:withArgs:), batchFinalizeOnMainThread, &BatchFinalizeBlock);

    // wait for the main thread to finish finalizing instances of classes marked CLS_FINALIZE_ON_MAIN_THREAD.
    pthread_mutex_lock(&BatchFinalizeBlock.mutex);
    while (!BatchFinalizeBlock.finished) pthread_cond_wait(&BatchFinalizeBlock.condition, &BatchFinalizeBlock.mutex);
    pthread_mutex_unlock(&BatchFinalizeBlock.mutex);
    //printf("<------ main thread finalize done\n");
}
// auto callback fired when the heap is about to grow. Under multithreaded
// GC, pre-emptively start a ratio collection unless one is already running.
// (Reconstructed else-branch and braces from the dropped lines 613, 615-623;
// verify against objc4 sources.)
static void objc_will_grow(auto_zone_t *zone, auto_heap_growth_info_t info) {
    if (MultiThreadedGC) {
        //printf("objc_will_grow %d\n", info);

        if (auto_zone_is_collecting(gc_zone)) {
            ;   // already collecting; nothing to do
        }
        else {
            auto_collect(gc_zone, AUTO_COLLECT_RATIO_COLLECTION, NULL);
        }
    }
}
// collector calls this with garbage ready
// Entry point registered with auto: finalize a batch of garbage, splitting
// the work with the main thread when main-thread-only classes exist.
// (Reconstructed: the `size_t cursor_size` parameter and else/braces were
// dropped by extraction.)
static void BatchInvalidate(auto_zone_t *zone,
                            auto_zone_foreach_object_t foreach,
                            auto_zone_cursor_t cursor,
                            size_t cursor_size)
{
    if (pthread_main_np() || !WantsMainThreadFinalization) {
        // Collect all objects. We're either pre-multithreaded on main thread or we're on the collector thread
        // but no main-thread-only objects have been allocated.
        batchFinalize(zone, foreach, cursor, cursor_size, finalizeOneObject);
    }
    else {
        // We're on the dedicated thread. Collect some on main thread, the rest here.
        batchFinalizeOnTwoThreads(zone, foreach, cursor, cursor_size);
    }
}
642 // idea: keep a side table mapping resurrected object pointers to their original Class, so we don't
643 // need to smash anything. alternatively, could use associative references to track against a secondary
644 // object with information about the resurrection, such as a stack crawl, etc.
// Resurrection tracking: a synthetic class that resurrected ("zombie")
// objects are reclassed to, plus a locked map from object -> original class.
static Class _NSResurrectedObjectClass;
static NXMapTable *_NSResurrectedObjectMap = NULL;
static OBJC_DECLARE_LOCK(_NSResurrectedObjectLock);

// Look up the original class of a resurrected object (NULL if unknown).
static Class resurrectedObjectOriginalClass(id object) {
    Class originalClass;
    OBJC_LOCK(&_NSResurrectedObjectLock);
    originalClass = (Class) NXMapGet(_NSResurrectedObjectMap, object);
    OBJC_UNLOCK(&_NSResurrectedObjectLock);
    return originalClass;
}
// Class-method stub installed on _NSResurrectedObject's metaclass: any
// class message simply answers self.
static id _NSResurrectedObject_classMethod(id self, SEL selector) { return self; }

// Instance-method stub: log the message sent to a resurrected object,
// reporting its original class, and answer nil.
static id _NSResurrectedObject_instanceMethod(id self, SEL name) {
    _objc_inform("**resurrected** object %p of class %s being sent message '%s'\n", self, class_getName(resurrectedObjectOriginalClass(self)), sel_getName(name));
    return nil;
}

// -finalize for resurrected objects: drop the map entry, log, and run the
// real -[NSObject finalize] implementation.
static void _NSResurrectedObject_finalize(id self, SEL _cmd) {
    Class originalClass;
    OBJC_LOCK(&_NSResurrectedObjectLock);
    originalClass = (Class) NXMapRemove(_NSResurrectedObjectMap, self);
    OBJC_UNLOCK(&_NSResurrectedObjectLock);
    if (originalClass) _objc_inform("**resurrected** object %p of class %s being finalized\n", self, class_getName(originalClass));
    _NSObject_finalize(self, _cmd);
}

// Dynamically resolve any instance method to the logging stub.
static BOOL _NSResurrectedObject_resolveInstanceMethod(id self, SEL _cmd, SEL name) {
    class_addMethod((Class)self, name, (IMP)_NSResurrectedObject_instanceMethod, "@@:");
    return YES;
}

// Dynamically resolve any class method to the identity stub.
static BOOL _NSResurrectedObject_resolveClassMethod(id self, SEL _cmd, SEL name) {
    class_addMethod(object_getClass(self), name, (IMP)_NSResurrectedObject_classMethod, "@@:");
    return YES;
}
// Create the _NSResurrectedObject class and its tracking map, wiring up the
// finalize override and the dynamic method resolvers on the metaclass.
static void _NSResurrectedObject_initialize() {
    _NSResurrectedObjectMap = NXCreateMapTable(NXPtrValueMapPrototype, 128);
    _NSResurrectedObjectClass = objc_allocateClassPair(objc_getClass("NSObject"), "_NSResurrectedObject", 0);
    class_addMethod(_NSResurrectedObjectClass, @selector(finalize), (IMP)_NSResurrectedObject_finalize, "v@:");
    Class metaClass = object_getClass(_NSResurrectedObjectClass);
    class_addMethod(metaClass, @selector(resolveInstanceMethod:), (IMP)_NSResurrectedObject_resolveInstanceMethod, "c@::");
    class_addMethod(metaClass, @selector(resolveClassMethod:), (IMP)_NSResurrectedObject_resolveClassMethod, "c@::");
    objc_registerClassPair(_NSResurrectedObjectClass);
}
// auto callback: a garbage object was resurrected during finalization.
// Remember its original class and reclass it as _NSResurrectedObject so any
// later use is logged instead of crashing.
static void resurrectZombie(auto_zone_t *zone, void *ptr) {
    id object = (id) ptr;
    Class cls = object->isa;
    if (cls != _NSResurrectedObjectClass) {
        // remember the original class for this instance.
        OBJC_LOCK(&_NSResurrectedObjectLock);
        NXMapInsert(_NSResurrectedObjectMap, ptr, cls);
        OBJC_UNLOCK(&_NSResurrectedObjectLock);
        object->isa = _NSResurrectedObjectClass;
    }
}
706 /***********************************************************************
707 * Pretty printing support
708 * For development purposes.
709 **********************************************************************/
712 static char *name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset, int withRetainCount);
// auto name callback: human-readable description of base+offset, without
// retain counts (see name_for_address for the heavy lifting).
static char* objc_name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset)
{
    return name_for_address(zone, base, offset, false);
}
719 /***********************************************************************
721 **********************************************************************/
// auto layout callback: strong-ivar layout for the object at address.
// The first word of a collected object is its isa/Class pointer.
static const unsigned char *objc_layout_for_address(auto_zone_t *zone, void *address)
{
    Class cls = *(Class *)address;
    return (const unsigned char *)class_getIvarLayout(cls);
}

// auto layout callback: weak-ivar layout for the object at address.
static const unsigned char *objc_weak_layout_for_address(auto_zone_t *zone, void *address)
{
    Class cls = *(Class *)address;
    return (const unsigned char *)class_getWeakIvarLayout(cls);
}
735 /***********************************************************************
737 **********************************************************************/
739 // Always called by _objcInit, even if GC is off.
// Records the GC on/off decision; when on, creates the collected zone and
// installs the runtime's callbacks into auto.
// NOTE(review): numbering gaps (741-744, 746-748, 751, 754, 757-758, 761+)
// show dropped lines: function braces, the `UseGC = on;` assignment, the
// guard around the inform call, the `if (UseGC)` body structure, and
// whatever setup preceded auto_zone_start_monitor. Restore from objc4.
740 __private_extern__ void gc_init(BOOL on)
// Announce GC state (presumably guarded by a verbosity flag -- TODO confirm).
745 _objc_inform("GC: is %s", on ? "ON" : "OFF");
749 // Add GC state to crash log reports
750 _objc_inform_on_crash("garbage collection is ON");
752 // Set up the GC zone
753 gc_zone = gc_zone_init();
755 // no NSObject until Foundation calls objc_collect_init()
756 _NSObject_finalize = &_objc_msgForward;
// Hand auto the class list so it can scan class globals.
759 auto_zone_start_monitor(false);
760 auto_zone_set_class_list((int (*)(void **, int))objc_getClassList);
// Create the collected zone and register the ObjC runtime's callbacks
// (finalization, resurrection, layout, and naming) with it.
// (Reconstructed: the `auto_zone_t *result;` declaration and the trailing
// `return result;`/brace were dropped by extraction.)
static auto_zone_t *gc_zone_init(void)
{
    auto_zone_t *result;

    // result = auto_zone_create("objc auto collected zone");
    result = auto_zone_create("auto_zone");

    auto_collection_control_t *control = auto_collection_parameters(result);

    // set up the magic control parameters
    control->batch_invalidate = BatchInvalidate;
    control->will_grow = objc_will_grow;
    control->resurrect = resurrectZombie;
    control->layout_for_address = objc_layout_for_address;
    control->weak_layout_for_address = objc_weak_layout_for_address;
    control->name_for_address = objc_name_for_address;

    return result;
}
// Called by Foundation to install auto's interruption callback.
// Completes GC setup once Foundation is loaded: resolves the real
// -[NSObject finalize] IMP and creates the resurrection-tracking class.
// Returns the collected zone as a malloc zone.
// NOTE(review): the `callback` parameter is unused in the visible code;
// its registration may have been in lines dropped by extraction -- confirm
// against objc4 sources.
malloc_zone_t *objc_collect_init(int (*callback)(void))
{
    // Find NSObject's finalize method now that Foundation is loaded.
    // fixme only look for the base implementation, not a category's
    _NSObject_finalize = class_getMethodImplementation(objc_getClass("NSObject"), @selector(finalize));
    if (_NSObject_finalize == &_objc_msgForward) {
        _objc_fatal("GC: -[NSObject finalize] unimplemented!");
    }

    // create the _NSResurrectedObject class used to track resurrections.
    _NSResurrectedObject_initialize();

    return (malloc_zone_t *)gc_zone;
}
807 /***********************************************************************
809 **********************************************************************/
811 /* This is non-deadlocking with respect to malloc's locks EXCEPT:
812 * %ls, %a, %A formats
// Minimal debug printf; writes straight to stderr to stay clear of malloc.
// (Reconstructed: the `va_list ap;` declaration and `va_end`/brace were
// dropped by extraction.)
static void objc_debug_printf(const char *format, ...)
{
    va_list ap;
    va_start(ap, format);
    vfprintf(stderr, format, ap);
    va_end(ap);
}

// Lazily-created scratch malloc zone for the debug tools, so their
// allocations stay out of the zone being inspected.
static malloc_zone_t *objc_debug_zone(void)
{
    static malloc_zone_t *z = NULL;
    if (!z) {
        z = malloc_create_zone(4096, 0);
        malloc_set_zone_name(z, "objc-auto debug");
    }
    return z;
}
// Write the base-`base` digits of value starting at head (no terminator);
// most-significant digit first via recursion. Returns the position one past
// the last digit written. Value 0 produces a single '0'.
// (Reconstructed: the trailing `return head + 1;` and closing brace were
// dropped by extraction.)
static char *_malloc_append_unsigned(uintptr_t value, unsigned base, char *head) {
    if (value >= base) head = _malloc_append_unsigned(value / base, base, head);
    value = value % base;
    head[0] = (value < 10) ? '0' + value : 'a' + value - 10;
    return head + 1;
}

// Append the decimal representation of value to the C string str.
static void strcati(char *str, uintptr_t value)
{
    str = _malloc_append_unsigned(value, 10, str + strlen(str));
    str[0] = '\0';
}

// Append the hexadecimal representation of value to the C string str.
static void strcatx(char *str, uintptr_t value)
{
    str = _malloc_append_unsigned(value, 16, str + strlen(str));
    str[0] = '\0';
}
// Best-effort search for the ivar of cls (or a superclass) that contains the
// given byte offset. Superclasses are scanned first; a closer match among
// this class's own ivars wins.
// NOTE(review): numbering gaps (858-860, 862, 864, 866, 870, 877-878, 883,
// 886-888, 890, 892-900) show dropped lines: local declarations (i,
// ivar_offset, ivars), the `else result = NULL;` arm, `result = ivars[i];`,
// loop/if closing braces, the `if (result == NULL)` guard, free(ivars), the
// else-branch returning super_ivar, and `return result;`.
857 static Ivar ivar_for_offset(Class cls, vm_address_t offset)
861 Ivar super_ivar, result;
863 unsigned int ivar_count;
865 if (!cls) return NULL;
867 // scan base classes FIRST
868 super_ivar = ivar_for_offset(class_getSuperclass(cls), offset);
869 // result is best-effort; our ivars may be closer
871 ivars = class_copyIvarList(cls, &ivar_count);
872 if (ivars && ivar_count) {
873 // Try our first ivar. If it's too big, use super's best ivar.
874 ivar_offset = ivar_getOffset(ivars[0]);
875 if (ivar_offset > offset) result = super_ivar;
876 else if (ivar_offset == offset) result = ivars[0];
879 // Try our other ivars. If any is too big, use the previous.
880 for (i = 1; result == NULL && i < ivar_count; i++) {
881 ivar_offset = ivar_getOffset(ivars[i]);
882 if (ivar_offset == offset) {
884 } else if (ivar_offset > offset) {
885 result = ivars[i - 1];
889 // Found nothing. Return our last ivar.
891 result = ivars[ivar_count - 1];
// Append ".ivarName[+residual]" to buf describing the ivar of cls at the
// given byte offset, for pretty-printed object references. Offsets past the
// instance size are rendered as ".<extra>+N".
// NOTE(review): numbering gaps (902-904, 909-911, 913-917, 919-920, 924,
// 926-927, 929+) indicate dropped lines: the Ivar local declaration, an
// early return after the "<extra>" branch, a NULL-ivar fallback, the "."
// separator strcat, the `if (offset > 0)` guard with "+" before the final
// strcati, and closing braces.
901 static void append_ivar_at_offset(char *buf, Class cls, vm_address_t offset)
905 if (offset == 0) return; // don't bother with isa
906 if (offset >= class_getInstanceSize(cls)) {
907 strcat(buf, ".<extra>+");
908 strcati(buf, offset);
912 ivar = ivar_for_offset(cls, offset);
918 // fixme doesn't handle structs etc.
921 const char *ivar_name = ivar_getName(ivar);
922 if (ivar_name) strcat(buf, ivar_name);
923 else strcat(buf, "<anonymous ivar>");
// Residual offset inside the matched ivar (e.g. into a struct field).
925 offset -= ivar_getOffset(ivar);
928 strcati(buf, offset);
// Resolve a CF object's class name without linking CF: dlopen CoreFoundation
// (NOLOAD: only if already loaded) and call CFGetTypeID /
// _CFRuntimeGetClassWithTypeID through function pointers. Falls back to
// "anonymous_NSCFType".
// NOTE(review): numbering gaps (934, 936-938, 941, 943, 946, 949, 951-952,
// 955-956, 960+) show dropped lines: function braces, local declarations
// (result, dlh, the fake CFRuntimeClass struct with its className field,
// cfid, cfcls), and presumably a dlclose + `return result;`.
933 static const char *cf_class_for_object(void *cfobj)
935 // ick - we don't link against CF anymore
939 size_t (*CFGetTypeID)(void *);
940 void * (*_CFRuntimeGetClassWithTypeID)(size_t);
942 result = "anonymous_NSCFType";
944 dlh = dlopen("/System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation", RTLD_LAZY | RTLD_NOLOAD | RTLD_FIRST);
946 if (!dlh) return result;
947 CFGetTypeID = (size_t(*)(void*)) dlsym(dlh, "CFGetTypeID");
948 _CFRuntimeGetClassWithTypeID = (void*(*)(size_t)) dlsym(dlh, "_CFRuntimeGetClassWithTypeID");
950 if (CFGetTypeID && _CFRuntimeGetClassWithTypeID) {
953 const char *className;
954 // don't care about the rest
957 cfid = (*CFGetTypeID)(cfobj);
958 cfcls = (*_CFRuntimeGetClassWithTypeID)(cfid);
959 result = cfcls->className;
// Build a human-readable description of the block at `base` (optionally the
// ivar at `offset` inside it), e.g. "NSString.ivar[32] [[refcount=1]]".
// Result is allocated in objc_debug_zone(); caller frees with
// malloc_zone_free. Uses *_no_lock auto calls so it is usable mid-collection.
// NOTE(review): numbering gaps (968, 970-979, 985-986, 992, 994-995,
// 997-1000, 1003-1004, 1007-1009, 1011-1012, 1016-1018, 1021+) show dropped
// lines: the APPEND_SIZE macro body continuation, buf/result declarations,
// the `switch (type)` header, `break`s, APPEND_SIZE uses, the else for the
// class-name strcat, `if (offset)`, the "]]" terminator, `return result;`,
// and closing braces.
967 static char *name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset, int withRetainCount)
969 #define APPEND_SIZE(s) \
980 auto_zone_size_no_lock(zone, (void *)base);
981 auto_memory_type_t type = size ?
982 auto_zone_get_layout_type_no_lock(zone, (void *)base) : AUTO_TYPE_UNKNOWN;
983 unsigned int refcount = size ?
984 auto_zone_retain_count_no_lock(zone, (void *)base) : 0;
// Objects: print class name (resolving NSCFType via CF), then the ivar path.
987 case AUTO_OBJECT_SCANNED:
988 case AUTO_OBJECT_UNSCANNED: {
989 const char *class_name = object_getClassName((id)base);
990 if (0 == strcmp(class_name, "NSCFType")) {
991 strcat(buf, cf_class_for_object((void *)base));
993 strcat(buf, class_name);
996 append_ivar_at_offset(buf, object_getClass((id)base), offset);
// Non-object collected memory kinds.
1001 case AUTO_MEMORY_SCANNED:
1002 strcat(buf, "{conservative-block}");
1005 case AUTO_MEMORY_UNSCANNED:
1006 strcat(buf, "{no-pointers-block}");
1010 strcat(buf, "{unallocated-or-stack}");
1013 if (withRetainCount && refcount > 0) {
1014 strcat(buf, " [[refcount=");
1015 strcati(buf, refcount);
// Copy the scratch buffer into a heap string owned by the debug zone.
1019 result = malloc_zone_malloc(objc_debug_zone(), 1 + strlen(buf));
1020 strcpy(result, buf);
// Context threaded through the zone enumerator while listing instances of
// one class.
// NOTE(review): numbering gaps (1029-1033) show dropped fields -- at least
// the target class, its name, and an instance counter (used below as
// ctx->cls / ctx.clsname / ctx.count) -- plus the closing brace.
1027 struct objc_class_recorder_context {
1028 malloc_zone_t *zone;
// malloc-introspection callback: for each in-use range, print it when it is
// an object whose class chain includes ctx->cls.
// NOTE(review): gaps (1036, 1039-1041, 1047, 1051, 1055, 1057-1064) indicate
// dropped lines: braces, vm_range_t r/end declarations, `Class cls;`,
// `unsigned rc;`, ctx->count++, and loop `break`/closing braces.
1034 static void objc_class_recorder(task_t task, void *context, unsigned type_mask,
1035 vm_range_t *ranges, unsigned range_count)
1037 struct objc_class_recorder_context *ctx =
1038 (struct objc_class_recorder_context *)context;
1042 for (r = ranges, end = ranges + range_count; r < end; r++) {
1043 auto_memory_type_t type =
1044 auto_zone_get_layout_type_no_lock(ctx->zone, (void *)r->address);
1045 if (type == AUTO_OBJECT_SCANNED || type == AUTO_OBJECT_UNSCANNED) {
1046 // Check if this is an instance of class ctx->cls or some subclass
1048 Class isa = *(Class *)r->address;
1049 for (cls = isa; cls; cls = _class_getSuperclass(cls)) {
1050 if (cls == ctx->cls) {
1052 objc_debug_printf("[%p] : %s", r->address, _class_getName(isa));
1053 if ((rc = auto_zone_retain_count_no_lock(ctx->zone, (void *)r->address))) {
1054 objc_debug_printf(" [[refcount %u]]", rc);
1056 objc_debug_printf("\n");
// Debug tool: print every live instance of the named class (and subclasses)
// in the collected zone, via the zone's malloc introspection enumerator.
// NOTE(review): numbering gaps (1066, 1071-1072, 1074-1075, 1079+) indicate
// dropped lines: braces, `ctx.count = 0;`, the `if (!ctx.cls)` guard with an
// early return around the "No class" message, and the closing brace.
1065 __private_extern__ void objc_enumerate_class(char *clsname)
1067 struct objc_class_recorder_context ctx;
1068 ctx.zone = auto_zone();
1069 ctx.clsname = clsname;
1070 ctx.cls = objc_getClass(clsname); // GrP fixme may deadlock if classHash lock is already owned
1073 objc_debug_printf("No class '%s'\n", clsname);
1076 objc_debug_printf("\n\nINSTANCES OF CLASS '%s':\n\n", clsname);
1077 (*ctx.zone->introspect->enumerator)(mach_task_self(), &ctx, MALLOC_PTR_IN_USE_RANGE_TYPE, (vm_address_t)ctx.zone, NULL, objc_class_recorder);
1078 objc_debug_printf("\n%d instances\n\n", ctx.count);
// auto_zone reference-enumeration callback: print one referrer->referent
// edge in human-readable form. Names come from name_for_address() and are
// owned by the debug zone, so they must be freed here.
static void objc_reference_printer(auto_zone_t *zone, void *ctx, 
                                   auto_reference_t ref)
{
    char *referrer_name = name_for_address(zone, ref.referrer_base, ref.referrer_offset, true);
    char *referent_name = name_for_address(zone, ref.referent, 0, true);

    objc_debug_printf("[%p%+d -> %p] : %s -> %s\n", 
                      ref.referrer_base, ref.referrer_offset, ref.referent, 
                      referrer_name, referent_name);

    malloc_zone_free(objc_debug_zone(), referrer_name);
    malloc_zone_free(objc_debug_zone(), referent_name);
}
// Print every reference to `referent` found by the collector.
// `lock` selects the locking flavor of the enumerator: pass false when the
// caller (e.g. a debugger at a breakpoint) may already hold the zone lock.
__private_extern__ void objc_print_references(void *referent, void *stack_bottom, int lock)
{
    if (lock) {
        auto_enumerate_references(auto_zone(), referent, 
                                  objc_reference_printer, stack_bottom, NULL);
    } else {
        auto_enumerate_references_no_lock(auto_zone(), referent, 
                                          objc_reference_printer, stack_bottom, NULL);
    }
}
// One node in the reference graph built by objc_print_recursive_refs().
// NOTE(review): the struct wrapper and the `referrers_used` field were
// dropped by extraction; reconstructed from uses elsewhere in this file.
typedef struct blob {
    vm_address_t address;        // of this object
    int refcount;                // of this object - nonzero means ROOT
    int depth;                   // number of links away from referent, or -1
    auto_reference_t *referrers; // of this object
    int referrers_used;
    int referrers_allocated;
    auto_reference_t back;       // reference from this object back toward the target
    uint32_t ID;                 // Graphic ID for grafflization
} blob;
// Simple growable FIFO of blob pointers (see enqueue_blob/dequeue_blob).
typedef struct blob_queue {
    blob **list;
    unsigned int used;
    unsigned int allocated;
} blob_queue;

static blob_queue blobs = {NULL, 0, 0};          // every blob ever seen
static blob_queue untraced_blobs = {NULL, 0, 0}; // blobs not yet walked
static blob_queue root_blobs = {NULL, 0, 0};     // blobs found to be roots
// Progress spinner: prints one dot per second of elapsed wall-clock time,
// so long graph walks show signs of life without flooding the output.
static void spin(void) {
    static time_t t = 0;
    time_t now = time(NULL);
    if (t != now) {
        objc_debug_printf(".");
        t = now;
    }
}
// Append b to q, growing q->list geometrically (2n+1) when full.
static void enqueue_blob(blob_queue *q, blob *b)
{
    if (q->used == q->allocated) {
        q->allocated = q->allocated * 2 + 1;
        q->list = malloc_zone_realloc(objc_debug_zone(), q->list, q->allocated * sizeof(blob *));
    }
    q->list[q->used++] = b;
}
// Remove and return the first element of q, shifting the rest down.
// Behavior is undefined if q is empty; callers check q->used first.
static blob *dequeue_blob(blob_queue *q)
{
    blob *result = q->list[0];
    q->used--;
    memmove(&q->list[0], &q->list[1], q->used * sizeof(blob *));
    return result;
}
// Return the blob for addr, creating and registering a new one in `blobs`
// if none exists yet. Returns NULL for addr == 0.
static blob *blob_for_address(vm_address_t addr)
{
    blob *b, **bp, **end;

    if (addr == 0) return NULL;

    // Already known?
    for (bp = blobs.list, end = blobs.list+blobs.used; bp < end; bp++) {
        b = *bp;
        if (b->address == addr) return b;
    }

    // New blob. Blocks outside the auto zone get a fake refcount of 1 so
    // they are treated as roots; depth -1 means "not yet reached".
    b = malloc_zone_calloc(objc_debug_zone(), sizeof(blob), 1);
    b->address = addr;
    b->depth = -1;
    b->refcount = auto_zone_size_no_lock(auto_zone(), (void *)addr) ? auto_zone_retain_count_no_lock(auto_zone(), (void *)addr) : 1;
    enqueue_blob(&blobs, b);
    return b;
}
// Return 1 if a blob for addr is already registered, 0 otherwise.
// Unlike blob_for_address(), never creates a new blob.
static int blob_exists(vm_address_t addr)
{
    blob *b, **bp, **end;
    for (bp = blobs.list, end = blobs.list+blobs.used; bp < end; bp++) {
        b = *bp;
        if (b->address == addr) return 1;
    }
    return 0;
}
// Destroy the blobs table and all blob data in it
static void free_blobs(void)
{
    blob *b, **bp, **end;
    for (bp = blobs.list, end = blobs.list+blobs.used; bp < end; bp++) {
        b = *bp;
        // Fix: also free each blob's referrer array (allocated by
        // objc_blob_recorder via malloc_zone_realloc); it previously leaked.
        if (b->referrers) malloc_zone_free(objc_debug_zone(), b->referrers);
        malloc_zone_free(objc_debug_zone(), b);
    }
    if (blobs.list) malloc_zone_free(objc_debug_zone(), blobs.list);
}
// Print the chain of references from `root` back to the search target,
// following each blob's `back` edge. The final blob (the target itself)
// has back.referent == 0 and is printed without an arrow.
static void print_chain(auto_zone_t *zone, blob *root)
{
    blob *b;
    for (b = root; b != NULL; b = blob_for_address(b->back.referent)) {
        char *name;
        if (b->back.referent) {
            name = name_for_address(zone, b->address, b->back.referrer_offset, true);
            objc_debug_printf("[%p%+d] : %s ->\n", b->address, b->back.referrer_offset, name);
        } else {
            name = name_for_address(zone, b->address, 0, true);
            objc_debug_printf("[%p] : %s\n", b->address, name);
        }
        malloc_zone_free(objc_debug_zone(), name);
    }
}
// auto_zone reference-enumeration callback used while building the graph:
// record `ref` as a referrer of the blob in ctx, and queue the referrer
// itself for tracing if it has not been seen before.
static void objc_blob_recorder(auto_zone_t *zone, void *ctx, 
                               auto_reference_t ref)
{
    blob *b = (blob *)ctx;

    spin();

    // Grow the referrer array geometrically (2n+1) when full.
    if (b->referrers_used == b->referrers_allocated) {
        b->referrers_allocated = b->referrers_allocated * 2 + 1;
        b->referrers = malloc_zone_realloc(objc_debug_zone(), b->referrers, 
                                           b->referrers_allocated * 
                                           sizeof(auto_reference_t));
    }

    b->referrers[b->referrers_used++] = ref;
    if (!blob_exists(ref.referrer_base)) {
        enqueue_blob(&untraced_blobs, blob_for_address(ref.referrer_base));
    }
}
// Modes for objc_print_recursive_refs(): stop at instance roots only,
// stop at any heap root, or never stop (print every indirect reference).
#define INSTANCE_ROOTS 1
#define HEAP_ROOTS 2
#define ALL_REFS 3   // restored: referenced by objc_print_all_refs and objc_print_recursive_refs

static void objc_print_recursive_refs(vm_address_t target, int which, void *stack_bottom, int lock);
static void grafflize(blob_queue *blobs, int everything);
// Print chains from instance roots (objects with positive retain count) to target.
__private_extern__ void objc_print_instance_roots(vm_address_t target, void *stack_bottom, int lock)
{
    objc_print_recursive_refs(target, INSTANCE_ROOTS, stack_bottom, lock);
}
// Print chains from any heap root (any block with positive retain count) to target.
__private_extern__ void objc_print_heap_roots(vm_address_t target, void *stack_bottom, int lock)
{
    objc_print_recursive_refs(target, HEAP_ROOTS, stack_bottom, lock);
}
// Print every object that directly or indirectly references target.
__private_extern__ void objc_print_all_refs(vm_address_t target, void *stack_bottom, int lock)
{
    objc_print_recursive_refs(target, ALL_REFS, stack_bottom, lock);
}
// Sort the queue in place, biggest refcount first. O(n^2) exchange sort is
// deliberate: blob counts are small and this is debug-only code.
static void sort_blobs_by_refcount(blob_queue *blobs)
{
    unsigned int i, j;

    // simple bubble sort
    for (i = 0; i < blobs->used; i++) {
        for (j = i+1; j < blobs->used; j++) {
            if (blobs->list[i]->refcount < blobs->list[j]->refcount) {
                blob *temp = blobs->list[i];
                blobs->list[i] = blobs->list[j];
                blobs->list[j] = temp;
            }
        }
    }
}
// Sort the queue in place, shallowest depth first, so the graffle output
// stacks objects vertically by distance from the target.
static void sort_blobs_by_depth(blob_queue *blobs)
{
    unsigned int i, j;

    // simple bubble sort
    for (i = 0; i < blobs->used; i++) {
        for (j = i+1; j < blobs->used; j++) {
            if (blobs->list[i]->depth > blobs->list[j]->depth) {
                blob *temp = blobs->list[i];
                blobs->list[i] = blobs->list[j];
                blobs->list[j] = temp;
            }
        }
    }
}
// Build the graph of every object that (transitively) references `target`,
// then print either all of them (which == ALL_REFS) or the shortest chain
// from each root to the target (HEAP_ROOTS / INSTANCE_ROOTS). Also emits an
// OmniGraffle diagram via grafflize(). `lock` selects the locking flavor of
// the auto_zone enumerator. All queue state is module-global and reset here.
static void objc_print_recursive_refs(vm_address_t target, int which, void *stack_bottom, int lock)
{
    objc_debug_printf("\n   "); // make spinner draw in a pretty place

    // Construct pointed-to graph (of things eventually pointing to target)

    enqueue_blob(&untraced_blobs, blob_for_address(target));

    while (untraced_blobs.used > 0) {
        blob *b = dequeue_blob(&untraced_blobs);
        spin();
        if (lock) {
            auto_enumerate_references(auto_zone(), (void *)b->address, 
                                      objc_blob_recorder, stack_bottom, b);
        } else {
            auto_enumerate_references_no_lock(auto_zone(), (void *)b->address, 
                                              objc_blob_recorder, stack_bottom, b);
        }
    }

    // Walk pointed-to graph to find shortest paths from roots to target.
    // This is BREADTH-FIRST order.

    blob_for_address(target)->depth = 0;
    enqueue_blob(&untraced_blobs, blob_for_address(target));

    while (untraced_blobs.used > 0) {
        blob *b = dequeue_blob(&untraced_blobs);
        blob *other;
        auto_reference_t *r, *end;
        int stop = NO;

        spin();

        if (which == ALL_REFS) {
            // Never stop at roots.
            stop = NO;
        } else if (which == HEAP_ROOTS) {
            // Stop at any root (a block with positive retain count)
            stop = (b->refcount > 0);
        } else if (which == INSTANCE_ROOTS) {
            // Only stop at roots that are instances
            auto_memory_type_t type = auto_zone_get_layout_type_no_lock(auto_zone(), (void *)b->address);
            stop = (b->refcount > 0  &&  (type == AUTO_OBJECT_SCANNED || type == AUTO_OBJECT_UNSCANNED)); // GREG XXX ???
        }

        // If this object is a root, save it and don't walk its referrers.
        if (stop) {
            enqueue_blob(&root_blobs, b);
            continue;
        }

        // For any "other object" that points to "this object"
        // and does not yet have a depth:
        // (1) other object is one level deeper than this object
        // (2) (one of) the shortest path(s) from other object to the
        //     target goes through this object
        for (r = b->referrers, end = b->referrers + b->referrers_used; 
             r < end; 
             r++) 
        {
            other = blob_for_address(r->referrer_base);
            if (other->depth == -1) {
                other->depth = b->depth + 1;
                other->back = *r;
                enqueue_blob(&untraced_blobs, other);
            }
        }
    }

    {
        char *name = name_for_address(auto_zone(), target, 0, true);
        objc_debug_printf("\n\n%d %s %p (%s)\n\n", 
                          (which==ALL_REFS) ? blobs.used : root_blobs.used, 
                          (which==ALL_REFS) ? "INDIRECT REFS TO" : "ROOTS OF", 
                          target, name);
        malloc_zone_free(objc_debug_zone(), name);
    }

    if (which == ALL_REFS) {
        // Print all reference objects, biggest refcount first
        unsigned int i;
        sort_blobs_by_refcount(&blobs);
        for (i = 0; i < blobs.used; i++) {
            char *name = name_for_address(auto_zone(), blobs.list[i]->address, 0, true);
            objc_debug_printf("[%p] : %s\n", blobs.list[i]->address, name);
            malloc_zone_free(objc_debug_zone(), name);
        }
    }
    else {
        // Walk back chain from every root to the target, printing every step.
        while (root_blobs.used > 0) {
            blob *root = dequeue_blob(&root_blobs);
            print_chain(auto_zone(), root);
            objc_debug_printf("\n");
        }
    }

    grafflize(&blobs, which == ALL_REFS);

    objc_debug_printf("\ndone\n\n");

    // Reset all global queue state for the next invocation.
    free_blobs();
    if (untraced_blobs.list) malloc_zone_free(objc_debug_zone(), untraced_blobs.list);
    if (root_blobs.list) malloc_zone_free(objc_debug_zone(), root_blobs.list);

    memset(&blobs, 0, sizeof(blobs));
    memset(&root_blobs, 0, sizeof(root_blobs));
    memset(&untraced_blobs, 0, sizeof(untraced_blobs));
}
// Context for objc_block_recorder(): the zone being walked, the output
// file descriptor, and a running count of blocks written.
// NOTE(review): fields below `zone` were dropped by extraction and are
// reconstructed from their uses in objc_block_recorder/objc_dump_block_list.
struct objc_block_recorder_context {
    malloc_zone_t *zone;
    int fd;               // destination file descriptor
    unsigned int count;   // number of blocks written so far
};
// malloc-zone enumerator callback: write one line per allocated block to
// ctx->fd in the form "0x<hex address> <name>\n".
static void objc_block_recorder(task_t task, void *context, unsigned type_mask,
                                vm_range_t *ranges, unsigned range_count)
{
    char buf[20];
    struct objc_block_recorder_context *ctx = 
        (struct objc_block_recorder_context *)context;

    vm_range_t *r;
    vm_range_t *end;
    for (r = ranges, end = ranges + range_count; r < end; r++) {
        char *name = name_for_address(ctx->zone, r->address, 0, true);
        buf[0] = '\0';
        strcatx(buf, r->address);
        // fixme use fewer write() calls
        write(ctx->fd, "0x", 2);
        write(ctx->fd, buf, strlen(buf));
        write(ctx->fd, " ", 1);
        write(ctx->fd, name, strlen(name));
        write(ctx->fd, "\n", 1);
        ctx->count++;
        malloc_zone_free(objc_debug_zone(), name);
    }
}
// Write a list of every auto-allocated block to `path`, or to a fresh
// /tmp/blocks-XXXXX.txt file when path is NULL.
__private_extern__ void objc_dump_block_list(const char* path)
{
    struct objc_block_recorder_context ctx;
    char filename[] = "/tmp/blocks-XXXXX.txt";

    ctx.zone = auto_zone();
    ctx.count = 0;
    ctx.fd = (path ? open(path, O_WRONLY | O_CREAT | O_TRUNC, 0666) : mkstemps(filename, (int)strlen(strrchr(filename, '.'))));
    if (ctx.fd < 0) {
        // Fix: previously unchecked; a failed open made every write() fail silently.
        objc_debug_printf("couldn't open block list file (errno %d)\n", errno);
        return;
    }

    objc_debug_printf("\n\nALL AUTO-ALLOCATED BLOCKS\n\n");
    (*ctx.zone->introspect->enumerator)(mach_task_self(), &ctx, MALLOC_PTR_IN_USE_RANGE_TYPE, (vm_address_t)ctx.zone, NULL, objc_block_recorder);
    objc_debug_printf("%d blocks written to file\n", ctx.count);
    objc_debug_printf("open %s\n", (path ? path : filename));

    close(ctx.fd);  // Fix: descriptor previously leaked
}
// Write an OmniGraffle "<key>ID</key><integer>N</integer>" element to gfile.
static void grafflize_id(int gfile, int ID)
{
    char buf[20] = "";
    char *c;

    strcati(buf, ID);
    c = "<key>ID</key><integer>";
    write(gfile, c, strlen(c));
    write(gfile, buf, strlen(buf));
    c = "</integer>";  // NOTE(review): closing tag reconstructed; not visible in extraction
    write(gfile, c, strlen(c));
}
// head = REFERENT end = arrow
// tail = REFERRER end = no arrow
// Write one LineGraphic dict connecting referrer to referent. `important`
// (a back-reference on a shortest path) gets a thick stroke.
static void grafflize_reference(int gfile, auto_reference_t reference, 
                                int ID, int important)
{
    blob *referrer = blob_for_address(reference.referrer_base);
    blob *referent = blob_for_address(reference.referent);
    char *c;

    // begin line graphic
    c = "<dict><key>Class</key><string>LineGraphic</string>";
    write(gfile, c, strlen(c));

    // line's own ID
    grafflize_id(gfile, ID);

    // head (arrow end) attaches to the referent's graphic
    c = "<key>Head</key><dict>";
    write(gfile, c, strlen(c));
    grafflize_id(gfile, referent->ID);
    c = "</dict>";  // NOTE(review): closing tag reconstructed
    write(gfile, c, strlen(c));

    // tail (plain end) attaches to the referrer's graphic
    c = "<key>Tail</key><dict>";
    write(gfile, c, strlen(c));
    grafflize_id(gfile, referrer->ID);
    c = "</dict>";  // NOTE(review): closing tag reconstructed
    write(gfile, c, strlen(c));

    // style - head arrow, thick line if important
    c = "<key>Style</key><dict><key>stroke</key><dict>"
        "<key>HeadArrow</key><string>FilledArrow</string>"
        "<key>LineType</key><integer>1</integer>";
    write(gfile, c, strlen(c));
    if (important) {
        c = "<key>Width</key><real>3</real>";
        write(gfile, c, strlen(c));
    }
    c = "</dict></dict>";
    write(gfile, c, strlen(c));

    // end line graphic
    c = "</dict>";  // NOTE(review): closing tag reconstructed
    write(gfile, c, strlen(c));
}
// Write one ShapedGraphic (rectangle) dict for blob b: positioned
// vertically by depth, labeled with its name and address, drawn with a
// fat border when it is a root (refcount > 0).
static void grafflize_blob(int gfile, blob *b) 
{
    // fixme include ivar names too
    char *name = name_for_address(auto_zone(), b->address, 0, false);
    int width = 30 + (int)strlen(name)*6;
    int height = 40;
    char buf[40] = "";
    char *c;
    
    // rectangle
    c = "<dict>"
        "<key>Class</key><string>ShapedGraphic</string>"
        "<key>Shape</key><string>Rectangle</string>";
    write(gfile, c, strlen(c));
    
    // id
    grafflize_id(gfile, b->ID);
    
    // bounds
    // order vertically by depth
    // NOTE(review): the "},{"/"}}</string>" separators below were dropped by
    // extraction and are reconstructed; verify against original graffle output.
    c = "<key>Bounds</key><string>{{0,";
    write(gfile, c, strlen(c));
    buf[0] = '\0';
    strcati(buf, b->depth*60);
    write(gfile, buf, strlen(buf));
    c = "},{";
    write(gfile, c, strlen(c));
    buf[0] = '\0';
    strcati(buf, width);
    strcat(buf, ",");
    strcati(buf, height);
    write(gfile, buf, strlen(buf));
    c = "}}</string>";
    write(gfile, c, strlen(c));
    
    // label: RTF text with the block's name and "\<newline>0x<address>"
    c = "<key>Text</key><dict><key>Text</key>"
        "<string>{\\rtf1\\mac\\ansicpg10000\\cocoartf102\n"
        "{\\fonttbl\\f0\\fswiss\\fcharset77 Helvetica;\\fonttbl\\f1\\fswiss\\fcharset77 Helvetica-Bold;}\n"
        "{\\colortbl;\\red255\\green255\\blue255;}\n"
        "\\pard\\tx560\\tx1120\\tx1680\\tx2240\\tx3360\\tx3920\\tx4480\\tx5040\\tx5600\\tx6160\\tx6720\\qc\n"
        "\\f0\\fs20 \\cf0 ";
    write(gfile, c, strlen(c));
    write(gfile, name, strlen(name));
    strcpy(buf, "\\\n0x");
    strcatx(buf, b->address);
    write(gfile, buf, strlen(buf));
    c = "}</string></dict>";
    write(gfile, c, strlen(c));
    
    // styles
    c = "<key>Style</key><dict>";
    write(gfile, c, strlen(c));
    
    // no shadow
    c = "<key>shadow</key><dict><key>Draws</key><string>NO</string></dict>";
    write(gfile, c, strlen(c));
    
    // fat border if refcount > 0
    if (b->refcount > 0) {
        c = "<key>stroke</key><dict><key>Width</key><real>4</real></dict>";
        write(gfile, c, strlen(c));
    }
    
    // end styles
    c = "</dict>";
    write(gfile, c, strlen(c));
    
    // end rectangle
    c = "</dict>";
    write(gfile, c, strlen(c));
    
    malloc_zone_free(objc_debug_zone(), name);
}
// OmniGraffle property-list skeleton: the per-object/per-edge graphics
// written by grafflize() go between gheader and gfooter.
#define gheader "<?xml version=\"1.0\" encoding=\"UTF-8\"?><!DOCTYPE plist PUBLIC \"-//Apple Computer//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\"><plist version=\"1.0\"><dict><key>GraphDocumentVersion</key><integer>3</integer><key>ReadOnly</key><string>NO</string><key>GraphicsList</key><array>\n"

#define gfooter "</array></dict></plist>\n"
// Emit the blob graph as an OmniGraffle document in /tmp. With `everything`
// set, draws every blob and every reference edge; otherwise draws only
// blobs on shortest paths (depth >= 0) and their back-reference edges.
static void grafflize(blob_queue *blobs, int everything)
{
    // Don't require linking to Foundation!
    unsigned int i;
    int gfile;
    int nextid = 1;
    char filename[] = "/tmp/gc-XXXXX.graffle";

    // Open file
    gfile = mkstemps(filename, (int)strlen(strrchr(filename, '.')));
    if (gfile < 0) {
        objc_debug_printf("couldn't create a graffle file in /tmp/ (errno %d)\n", errno);
        return;
    }

    // Write header
    write(gfile, gheader, strlen(gheader));

    // Write a rectangle for each blob
    sort_blobs_by_depth(blobs);
    for (i = 0; i < blobs->used; i++) {
        blob *b = blobs->list[i];
        b->ID = nextid++;
        if (everything || b->depth >= 0) {
            grafflize_blob(gfile, b);
        }
    }

    for (i = 0; i < blobs->used; i++) {
        int j;
        blob *b = blobs->list[i];

        if (everything) {
            // Write an arrow for each reference
            // Use big arrows for backreferences
            // Fix: referrers were indexed with the outer blob index `i`
            // instead of the referrer index `j`, reading out of bounds and
            // misidentifying back-references.
            for (j = 0; j < b->referrers_used; j++) {
                int is_back_ref = (b->referrers[j].referent == b->back.referent  &&  b->referrers[j].referrer_offset == b->back.referrer_offset  &&  b->referrers[j].referrer_base == b->back.referrer_base);

                grafflize_reference(gfile, b->referrers[j], nextid++, 
                                    is_back_ref);
            }
        }
        else {
            // Write an arrow for each backreference
            if (b->depth > 0) {
                grafflize_reference(gfile, b->back, nextid++, false);
            }
        }
    }

    // Write footer and close
    write(gfile, gfooter, strlen(gfooter));
    close(gfile);
    objc_debug_printf("wrote object graph (%d objects)\nopen %s\n",
                      blobs->used, filename);
}