2 * Copyright (c) 2004 Apple Computer, Inc. All rights reserved.
4 * @APPLE_LICENSE_HEADER_START@
6 * Copyright (c) 2004 Apple Computer, Inc. All Rights Reserved.
8 * This file contains Original Code and/or Modifications of Original Code
9 * as defined in and that are subject to the Apple Public Source License
10 * Version 2.0 (the 'License'). You may not use this file except in
11 * compliance with the License. Please obtain a copy of the License at
12 * http://www.opensource.apple.com/apsl/ and read it before using this
15 * The Original Code and all software distributed under the License are
16 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
17 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
18 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
19 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
20 * Please see the License for the specific language governing rights and
21 * limitations under the License.
23 * @APPLE_LICENSE_HEADER_END@
27 * Copyright 2004 Apple Computer, Inc.
36 #import <mach-o/dyld.h>
38 #import "objc-private.h"
// --------------------------------------------------------------------
// Types mirrored from the non-open-source auto_zone.h header.
// NOTE(review): this chunk is a numbered listing with elided lines; the
// opening "typedef struct {" / "typedef enum {" lines of several
// aggregates below are not visible here, so the fragments will not
// compile as shown.
44 // Types and prototypes from non-open-source auto_zone.h
46 #include <sys/types.h>
47 #include <malloc/malloc.h>
// The collector's zone is handed around as an ordinary malloc zone.
49 typedef malloc_zone_t auto_zone_t;
// Timestamp type used by the collection-duration statistics below.
51 typedef uint64_t auto_date_t;
// Fields of the collector statistics record (auto_statistics_t);
// read by objc_collect_if_needed() and objc_numberAllocated() below.
54 unsigned version; // reserved - 0 for now
56 unsigned long long num_allocs; // number of allocations performed
57 volatile unsigned blocks_in_use;// number of pointers in use
58 unsigned bytes_in_use; // sum of the sizes of all pointers in use
59 unsigned max_bytes_in_use; // high water mark
60 unsigned bytes_allocated;
62 /* When there is an array, 0 stands for full collection, 1 for generational */
63 unsigned num_collections[2];
64 boolean_t last_collection_was_generational;
65 unsigned bytes_in_use_after_last_collection[2];
66 unsigned bytes_allocated_after_last_collection[2];
67 unsigned bytes_freed_during_last_collection[2];
68 auto_date_t duration_last_collection[2];
69 auto_date_t duration_all_collections[2];
// Collection request modes passed to auto_collect().
73 AUTO_COLLECTION_NO_COLLECTION = 0,
74 AUTO_COLLECTION_GENERATIONAL_COLLECTION,
75 AUTO_COLLECTION_FULL_COLLECTION
76 } auto_collection_mode_t;
// Logging bits OR-ed into control->log (consulted in gc_zone_init).
79 AUTO_LOG_COLLECTIONS = (1 << 1), // log whenever a collection occurs
80 AUTO_LOG_COLLECT_DECISION = (1 << 2), // logs when deciding whether to collect
81 AUTO_LOG_GC_IMPL = (1 << 3), // logs to help debug GC
82 AUTO_LOG_REGIONS = (1 << 4), // log whenever a new region is allocated
83 AUTO_LOG_UNUSUAL = (1 << 5), // log unusual circumstances
84 AUTO_LOG_WEAK = (1 << 6), // log weak reference manipulation
// Opaque cursor used by the batch-finalization callback.
88 typedef struct auto_zone_cursor *auto_zone_cursor_t;
90 typedef void (*auto_zone_foreach_object_t) (auto_zone_cursor_t cursor, void (*op) (void *ptr, void *data), void* data);
// Fields of auto_collection_control_t: the callbacks and tuning knobs
// this runtime installs into the collector (populated in gc_zone_init).
93 unsigned version; // reserved - 0 for now
94 boolean_t trace_stack_conservatively;
95 boolean_t (*should_collect)(auto_zone_t *, const auto_statistics_t *stats, boolean_t about_to_create_a_new_region);
96 // called back when a threshold is reached; must say whether to collect (and what type)
97 // all locks are released when that call back is called
98 // callee is free to call for statistics or reset the threshold
99 unsigned ask_should_collect_frequency;
100 // should_collect() is called each <N> allocations or free, where <N> is this field
101 unsigned full_vs_gen_frequency;
102 // ratio of generational vs. full GC for the frequency based ones
103 int (*collection_should_interrupt)(void);
104 // called during scan to see if garbage collection should be aborted
105 void (*invalidate)(auto_zone_t *zone, void *ptr, void *collection_context);
106 void (*batch_invalidate) (auto_zone_t *zone, auto_zone_foreach_object_t foreach, auto_zone_cursor_t cursor);
107 // called back with an object that is unreferenced
108 // callee is responsible for invalidating object state
109 void (*resurrect) (auto_zone_t *zone, void *ptr);
110 // convert the object into a safe-to-use, but otherwise "undead" object. no guarantees are made about the
111 // contents of this object, other than its liveness.
112 unsigned word0_mask; // mask for defining class
113 void (*note_unknown_layout)(auto_zone_t *zone, unsigned class_field);
114 // called once for each class encountered for which we don't know the layout
115 // callee can decide to register class with auto_zone_register_layout(), or do nothing
116 // Note that this function is called during GC and therefore should not do any auto-allocation
117 char* (*name_for_address) (auto_zone_t *zone, vm_address_t base, vm_address_t offset);
119 // set to auto_log_mask_t bits as desired
120 boolean_t disable_generational;
121 // if true, ignores requests to do generational GC.
122 boolean_t paranoid_generational;
123 // if true, always compares generational GC result to full GC garbage list
124 boolean_t malloc_stack_logging;
125 // if true, uses malloc_zone_malloc() for stack logging.
126 } auto_collection_control_t;
// Block layout categories known to the collector (enum opener elided).
// AUTO_OBJECT / AUTO_UNSCANNED flag definitions are not visible in this chunk.
129 AUTO_TYPE_UNKNOWN = -1, // this is an error value
132 AUTO_MEMORY_SCANNED = 0, // holds conservatively scanned pointers
133 AUTO_MEMORY_UNSCANNED = AUTO_UNSCANNED, // holds unscanned memory (bits)
134 AUTO_OBJECT_SCANNED = AUTO_OBJECT, // first word is 'isa', may have 'exact' layout info elsewhere
135 AUTO_OBJECT_UNSCANNED = AUTO_OBJECT | AUTO_UNSCANNED, // first word is 'isa', good for bits or auto_zone_retain'ed items
136 } auto_memory_type_t;
// A recorded reference edge: 'referent' is pointed at from
// (referrer_base + referrer_offset). Struct opener/typedef elided.
140 vm_address_t referent;
141 vm_address_t referrer_base;
142 intptr_t referrer_offset;
// Callback type for auto_enumerate_references() below.
145 typedef void (*auto_reference_recorder_t)(auto_zone_t *zone, void *ctx,
146 auto_reference_t reference);
// --------------------------------------------------------------------
// Forward declarations for the auto_zone entry points used in this
// file. Declared static here; presumably resolved against the private
// libauto at link/load time -- TODO(review) confirm linkage model, the
// definitions are not visible in this chunk.
149 static void auto_collect(auto_zone_t *zone, auto_collection_mode_t mode, void *collection_context);
150 static auto_collection_control_t *auto_collection_parameters(auto_zone_t *zone);
151 static const auto_statistics_t *auto_collection_statistics(auto_zone_t *zone);
152 static void auto_enumerate_references(auto_zone_t *zone, void *referent,
153 auto_reference_recorder_t callback,
154 void *stack_bottom, void *ctx);
155 static void auto_enumerate_references_no_lock(auto_zone_t *zone, void *referent, auto_reference_recorder_t callback, void *stack_bottom, void *ctx);
156 static auto_zone_t *auto_zone(void);
157 static void auto_zone_add_root(auto_zone_t *zone, void *root, size_t size);
158 static void* auto_zone_allocate_object(auto_zone_t *zone, size_t size, auto_memory_type_t type, boolean_t initial_refcount_to_one, boolean_t clear);
159 static const void *auto_zone_base_pointer(auto_zone_t *zone, const void *ptr);
160 static auto_memory_type_t auto_zone_get_layout_type(auto_zone_t *zone, void *ptr);
161 static auto_memory_type_t auto_zone_get_layout_type_no_lock(auto_zone_t *zone, void *ptr);
162 static boolean_t auto_zone_is_finalized(auto_zone_t *zone, const void *ptr);
163 static boolean_t auto_zone_is_valid_pointer(auto_zone_t *zone, const void *ptr);
164 static unsigned int auto_zone_release(auto_zone_t *zone, void *ptr);
165 static void auto_zone_retain(auto_zone_t *zone, void *ptr);
166 static unsigned int auto_zone_retain_count_no_lock(auto_zone_t *zone, const void *ptr);
167 static void auto_zone_set_class_list(int (*get_class_list)(void **buffer, int count));
168 static size_t auto_zone_size_no_lock(auto_zone_t *zone, const void *ptr);
169 static void auto_zone_start_monitor(boolean_t force);
170 static void auto_zone_write_barrier(auto_zone_t *zone, void *recipient, const unsigned int offset_in_bytes, const void *new_value);
171 static void *auto_zone_write_barrier_memmove(auto_zone_t *zone, void *dst, const void *src, size_t size);
// Local helpers defined later in this file.
175 static void record_allocation(Class cls);
176 static auto_zone_t *gc_zone_init(void);
// Global GC state. UseGC is the master switch, set once in gc_init().
179 __private_extern__ BOOL UseGC NOBSS = NO;
180 static BOOL RecordAllocations = NO;
// Bit-mask of isa-stomp debug behaviors, parsed from OBJC_ISA_STOMP.
181 static int IsaStompBits = 0x0;
// The single collected zone, created by gc_zone_init() when GC is on.
183 static auto_zone_t *gc_zone = NULL;
// True only while finalizers are running (guards resurrection checks).
184 static BOOL gc_zone_finalizing = NO;
// Generational-collection trigger: bytes allocated since last GC.
185 static intptr_t gc_collection_threshold = 128 * 1024;
// Every gc_collection_ratio-th triggered collection is a full one.
186 static size_t gc_collection_ratio = 100, gc_collection_counter = 0;
// Class names exempted from the isa-stomp hack (from env var file).
187 static NXMapTable *gc_finalization_safe_classes = NULL;
// If YES, global assigns retain/release; if NO, explicit roots are used.
188 static BOOL gc_roots_retained = YES;
190 /***********************************************************************
192 **********************************************************************/
// True when x is a pointer the collector's zone recognizes as a block start/interior.
194 #define ISAUTOOBJECT(x) (auto_zone_is_valid_pointer(gc_zone, (x)))
// NOTE(review): function bodies in this section are partially elided in
// this listing (missing braces / UseGC guards); comments describe only
// the statements that are visible.
197 // A should-collect callback that never allows collection.
198 // Currently used to prevent on-demand collection.
199 static boolean_t objc_never_collect(auto_zone_t *zone, const auto_statistics_t *stats, boolean_t about_to_create_a_new_region)
205 /***********************************************************************
207 * Called by various libraries.
208 **********************************************************************/
// Request an immediate full collection of the GC zone.
210 void objc_collect(void)
213 auto_collect(gc_zone, AUTO_COLLECTION_FULL_COLLECTION, NULL);
// Conditionally collect. With OBJC_GENERATIONAL set, uses a byte
// threshold to trigger generational GCs, promoting every
// gc_collection_ratio-th one to a full GC; otherwise loops full
// collections while they remain productive.
217 void objc_collect_if_needed(unsigned long options) {
219 const auto_statistics_t *stats = auto_collection_statistics(gc_zone);
220 if (options & OBJC_GENERATIONAL) {
221 // use an absolute memory allocated threshold to decide when to generationally collect.
222 intptr_t bytes_allocated_since_last_gc = stats->bytes_in_use - stats->bytes_in_use_after_last_collection[stats->last_collection_was_generational];
223 if (bytes_allocated_since_last_gc >= gc_collection_threshold) {
224 // malloc_printf("bytes_allocated_since_last_gc = %ld\n", bytes_allocated_since_last_gc);
225 // periodically run a full collection until to keep memory usage down, controlled by OBJC_COLLECTION_RATIO (100 to 1 is the default).
226 auto_collection_mode_t mode = AUTO_COLLECTION_GENERATIONAL_COLLECTION;
227 if (gc_collection_counter++ >= gc_collection_ratio) {
228 mode = AUTO_COLLECTION_FULL_COLLECTION;
229 gc_collection_counter = 0;
231 auto_collect(gc_zone, mode, NULL);
234 // Run full collections until we no longer recover additional objects. We use two measurements
235 // to determine whether or not the collector is being productive: the total number of blocks
236 // must be shrinking, and the collector must itself be freeing bytes. Otherwise, another thread
237 // could be responsible for reducing the block count. On the other hand, another thread could
238 // be generating a lot of garbage, which would keep us collecting. This will need even more
239 // tuning to prevent starvation, etc.
240 unsigned blocks_in_use;
// Loop body (the enclosing do { is elided in this listing).
242 blocks_in_use = stats->blocks_in_use;
243 auto_collect(gc_zone, AUTO_COLLECTION_FULL_COLLECTION, NULL);
244 // malloc_printf("bytes freed = %ld\n", stats->bytes_freed_during_last_collection[0]);
245 } while (stats->bytes_freed_during_last_collection[0] > 0 && stats->blocks_in_use < blocks_in_use);
246 gc_collection_counter = 0;
// Request an immediate generational (young-object) collection.
251 void objc_collect_generation(void)
254 auto_collect(gc_zone, AUTO_COLLECTION_GENERATIONAL_COLLECTION, NULL);
// Returns the collector's current count of live blocks.
259 unsigned int objc_numberAllocated(void)
261 const auto_statistics_t *stats = auto_collection_statistics(gc_zone);
262 return stats->blocks_in_use;
// YES if GC is on and 'object' lies within the collected zone.
266 BOOL objc_isAuto(id object)
268 return UseGC && ISAUTOOBJECT(object) != 0;
// Reports whether GC is enabled (body elided in this listing).
272 BOOL objc_collecting_enabled(void)
278 /***********************************************************************
280 * Called by CF and Foundation.
281 **********************************************************************/
283 // Allocate an object in the GC zone, with the given number of extra bytes.
// Returns a zeroed, scanned block with refcount 0; optionally records
// the allocation per-class for the histogram.
284 id objc_allocate_object(Class cls, int extra)
287 (id)auto_zone_allocate_object(gc_zone, cls->instance_size + extra,
288 AUTO_OBJECT_SCANNED, false, true);
290 if (RecordAllocations) record_allocation(cls);
295 /***********************************************************************
296 * Write barrier exports
297 * Called by pretty much all GC-supporting code.
298 **********************************************************************/
301 // Platform-independent write barriers
302 // These contain the UseGC check that the platform-specific
303 // runtime-rewritten implementations do not.
// Store through an arbitrary strong-cast slot; barrier only under GC.
305 id objc_assign_strongCast_generic(id value, id *dest)
308 return objc_assign_strongCast_gc(value, dest);
310 return (*dest = value);
// Store into a global/static slot; barrier only under GC.
315 id objc_assign_global_generic(id value, id *dest)
318 return objc_assign_global_gc(value, dest);
320 return (*dest = value);
// Store into an instance variable at byte 'offset'; barrier only under GC.
325 id objc_assign_ivar_generic(id value, id dest, unsigned int offset)
328 return objc_assign_ivar_gc(value, dest, offset);
330 id *slot = (id*) ((char *)dest + offset);
331 return (*slot = value);
337 // PPC write barriers are in objc-auto-ppc.s
338 // write_barrier_init conditionally stomps those to jump to the _impl versions.
// Non-PPC fallbacks: plain C wrappers over the generic barriers.
342 // use generic implementation until time can be spent on optimizations
343 id objc_assign_strongCast(id value, id *dest) { return objc_assign_strongCast_generic(value, dest); }
344 id objc_assign_global(id value, id *dest) { return objc_assign_global_generic(value, dest); }
345 id objc_assign_ivar(id value, id dest, unsigned int offset) { return objc_assign_ivar_generic(value, dest, offset); }
347 // not defined(__ppc__)
// memmove with write-barrier semantics when GC is on, plain memmove otherwise.
351 void *objc_memmove_collectable(void *dst, const void *src, size_t size)
354 return auto_zone_write_barrier_memmove(gc_zone, dst, src, size);
356 return memmove(dst, src, size);
361 /***********************************************************************
363 * Used to isolate resurrection of garbage objects during finalization.
364 **********************************************************************/
// YES if ptr is non-NULL and the collector has already finalized it.
365 BOOL objc_is_finalized(void *ptr) {
366 return ptr != NULL && auto_zone_is_finalized(gc_zone, ptr);
370 /***********************************************************************
371 * CF-only write barrier exports
373 * The gc_zone guards are not thought to be necessary
374 **********************************************************************/
376 // Exported as very private SPI to Foundation to tell CF about
// Ivar store where the base object is already known: barrier the slot
// if 'base' is a collected block, then perform the store.
377 void* objc_assign_ivar_address_CF(void *value, void *base, void **slot)
379 if (value && gc_zone) {
380 if (auto_zone_is_valid_pointer(gc_zone, base)) {
381 unsigned int offset = (((char *)slot)-(char *)base);
382 auto_zone_write_barrier(gc_zone, base, offset, value);
386 return (*slot = value);
390 // Same as objc_assign_strongCast_gc, should tell Foundation to use _gc version instead
391 // exported as very private SPI to Foundation to tell CF about
// Strong-cast store for CF: derive the enclosing block from the slot
// address, barrier it, then store.
392 void* objc_assign_strongCast_CF(void* value, void **slot)
394 if (value && gc_zone) {
395 void *base = (void *)auto_zone_base_pointer(gc_zone, (void*)slot);
397 unsigned int offset = (((char *)slot)-(char *)base);
398 auto_zone_write_barrier(gc_zone, base, offset, value);
401 return (*slot = value);
405 /***********************************************************************
406 * Write barrier implementations, optimized for when GC is known to be on
407 * Called by the write barrier exports only.
408 * These implementations assume GC is on. The exported function must
409 * either perform the check itself or be conditionally stomped at
411 **********************************************************************/
// Strong-cast store, GC known on: find the block containing 'slot',
// barrier it, then store. (Enclosing braces elided in this listing.)
413 __private_extern__ id objc_assign_strongCast_gc(id value, id *slot)
417 base = (id) auto_zone_base_pointer(gc_zone, (void*)slot);
419 unsigned int offset = (((char *)slot)-(char *)base);
420 auto_zone_write_barrier(gc_zone, base, (char*)slot - (char*)base, value);
422 return (*slot = value);
// Global store, GC known on. Two strategies selected by
// gc_roots_retained: retain/release bookkeeping, or explicit root
// registration via auto_zone_add_root.
426 __private_extern__ id objc_assign_global_gc(id value, id *slot)
428 if (gc_roots_retained) {
429 if (value && ISAUTOOBJECT(value)) {
// Storing a finalized object into a root is a likely bug; warn.
430 if (auto_zone_is_finalized(gc_zone, value))
431 _objc_inform("GC: storing an already collected object %p into global memory at %p\n", value, slot);
432 auto_zone_retain(gc_zone, value);
// Release whatever the slot previously held.
434 if (*slot && ISAUTOOBJECT(*slot)) {
435 auto_zone_release(gc_zone, *slot);
438 // use explicit root registration.
439 if (value && ISAUTOOBJECT(value)) {
440 if (auto_zone_is_finalized(gc_zone, value))
441 _objc_inform("GC: storing an already collected object %p into global memory at %p\n", value, slot);
442 auto_zone_add_root(gc_zone, slot, sizeof(id*));
445 return (*slot = value);
// Ivar store, GC known on: barrier the owning object and, while
// finalizing, refuse stores that would resurrect garbage.
449 __private_extern__ id objc_assign_ivar_gc(id value, id base, unsigned int offset)
451 id *slot = (id*) ((char *)base + offset);
454 if (ISAUTOOBJECT(base)) {
455 auto_zone_write_barrier(gc_zone, base, offset, value);
456 if (gc_zone_finalizing && (auto_zone_get_layout_type(gc_zone, value) & AUTO_OBJECT) != AUTO_OBJECT) {
457 // XXX_PCB: Hack, don't allow resurrection by inhibiting assigns of garbage, non-object, pointers.
458 // XXX BG: move this check into auto & institute a new policy for resurrection, to wit:
459 // Resurrected Objects should go on a special list during finalization & be zombified afterwards
460 // using the noisy isa-slam hack.
461 if (auto_zone_is_finalized(gc_zone, value) && !auto_zone_is_finalized(gc_zone, base)) {
462 _objc_inform("GC: *** objc_assign_ivar_gc: preventing a resurrecting store of %p into %p + %d\n", value, base, offset);
// Reached when 'base' is not in the collected zone at all.
467 _objc_inform("GC: *** objc_assign_ivar_gc: %p + %d isn't in the auto_zone.\n", base, offset);
471 return (*slot = value);
476 /***********************************************************************
477 * Finalization support
478 * Called by auto and Foundation.
479 **********************************************************************/
// USE_ISA_HACK: after finalization, smash the object's isa to
// NSDeallocatedObject so stale messages are absorbed instead of crashing.
481 #define USE_ISA_HACK 1
482 #define DO_ISA_DEBUG 0
487 // NSDeallocatedObject silently ignores all messages sent to it.
488 @interface NSDeallocatedObject {
// Parallel-array table mapping finalized pointers back to their
// original classes. Not thread-safe by construction -- presumably only
// touched during finalization; TODO(review) confirm.
496 static unsigned int FTCount, FTSize;
497 static struct FTTable {
498 NSDeallocatedObject *object;
502 /* a quick and very dirty table to map finalized pointers to their isa's */
// Append (object, class); grows the table geometrically.
// NOTE(review): realloc result is not checked in the visible lines.
503 static void addPointerFT(NSDeallocatedObject *object, Class class) {
504 if (FTCount >= FTSize) {
505 FTSize = 2*(FTSize + 10);
506 FTTablePtr = realloc(FTTablePtr, FTSize*sizeof(struct FTTable));
508 FTTablePtr[FTCount].object = object;
509 FTTablePtr[FTCount].class = class;
// Linear lookup of a finalized pointer's original class; O(FTCount).
513 static Class classForPointerFT(NSDeallocatedObject *object) {
515 for (i = 0; i < FTCount; ++i)
516 if (FTTablePtr[i].object == object)
517 return FTTablePtr[i].class;
// Debug breakpoint hook: break on objc_stale() to catch messages sent
// to reclaimed objects (body elided in this listing).
521 void objc_stale(id object) {
524 @implementation NSDeallocatedObject
525 + (Class)class { return self; }
// Instance queries answer with the pre-finalization class when known.
526 - (Class)class { return classForPointerFT(self); }
527 - (BOOL)isKindOfClass:(Class)aClass {
529 for (cls = classForPointerFT(self); nil != cls; cls = cls->super_class)
530 if (cls == (Class)aClass) return YES;
// All other messages are swallowed via forward::, optionally logging
// per the IsaStompBits debug mask.
533 + forward:(SEL)aSelector :(marg_list)args { return nil; }
534 - forward:(SEL)aSelector :(marg_list)args {
535 Class class = classForPointerFT(self);
537 if (IsaStompBits & 0x2)
538 _objc_inform("***finalized & *recovered* object %p of being sent '%s'!!\n", self, sel_getName(aSelector));
539 // if its not in the current table, then its being messaged from a STALE REFERENCE!!
543 if (IsaStompBits & 0x4)
544 _objc_inform("finalized object %p of class %s being sent %s\n", self, class->name, sel_getName(aSelector));
// Cached classes/IMPs resolved in objc_collect_init() once Foundation loads.
550 static Class _NSDeallocatedObject = Nil;
552 static IMP _NSObject_finalize = NULL;
555 // Handed to and then called by auto
// Collector 'invalidate' callback: send -finalize to one garbage
// object, then (isa-hack) record its class and stomp its isa to
// NSDeallocatedObject. Lines are elided in this listing; comments
// describe only visible statements.
556 static void sendFinalize(auto_zone_t *zone, void* ptr, void *context)
559 // special signal to mark end of finalization phase
560 if (IsaStompBits & 0x8)
561 _objc_inform("----finalization phase over-----");
567 Class cls = object->isa;
569 if (cls == _NSDeallocatedObject) {
570 // already finalized, do nothing
571 _objc_inform("sendFinalize called on NSDeallocatedObject %p", ptr);
// Look up -finalize; _objc_msgForward means the class never implemented it.
575 IMP finalizeMethod = class_lookupMethod(cls, @selector(finalize));
576 if (finalizeMethod == &_objc_msgForward) {
577 _objc_inform("GC: class '%s' does not implement -finalize!", cls->name);
// Flag resurrection-prevention checks in the write barriers while the
// finalizer runs.
580 gc_zone_finalizing = YES;
583 // fixme later, optimize away calls to NSObject's -finalize
584 (*finalizeMethod)(object, @selector(finalize));
585 } @catch (id exception) {
586 _objc_inform("GC: -finalize resulted in an exception being thrown %p!", exception);
587 // FIXME: what about uncaught C++ exceptions? Here's an idea, define a category
588 // in a .mm file, so we can catch both flavors of exceptions.
589 // @interface NSObject (TryToFinalize)
590 // - (BOOL)tryToFinalize {
594 // } @catch (id exception) {
605 gc_zone_finalizing = NO;
// Isa-stomp bookkeeping: remember the dead object's class, then point
// its isa at NSDeallocatedObject via the ivar write barrier.
608 NSDeallocatedObject *dead = (NSDeallocatedObject *)object;
609 // examine list of okay classes and leave alone XXX get from file
610 // fixme hack: smash isa to dodge some out-of-order finalize bugs
611 // the following are somewhat finalize order safe
612 //if (!strcmp(dead->oldIsA->name, "NSCFArray")) return;
613 //if (!strcmp(dead->oldIsA->name, "NSSortedArray")) return;
614 if (IsaStompBits & 0x8)
615 printf("adding [%d] %p %s\n", FTCount, dead, dead->IsA->name);
616 addPointerFT(dead, dead->IsA);
617 objc_assign_ivar(_NSDeallocatedObject, dead, 0);
// Per-object worker used with the batch foreach iterator; same shape
// as sendFinalize but also honors gc_finalization_safe_classes.
621 static void finalizeOneObject(void *ptr, void *data) {
623 Class cls = object->isa;
625 if (cls == _NSDeallocatedObject) {
626 // already finalized, do nothing
627 _objc_inform("finalizeOneObject called on NSDeallocatedObject %p", ptr);
631 IMP finalizeMethod = class_lookupMethod(cls, @selector(finalize));
632 if (finalizeMethod == &_objc_msgForward) {
633 _objc_inform("GC: class '%s' does not implement -finalize!", cls->name);
636 // fixme later, optimize away calls to NSObject's -finalize
637 (*finalizeMethod)(object, @selector(finalize));
640 NSDeallocatedObject *dead = (NSDeallocatedObject *)object;
641 // examine list of okay classes and leave alone XXX get from file
642 // fixme hack: smash isa to dodge some out-of-order finalize bugs
643 // the following are somewhat finalize order safe
644 //if (!strcmp(dead->oldIsA->name, "NSCFArray")) return;
645 //if (!strcmp(dead->oldIsA->name, "NSSortedArray")) return;
// Classes registered via OBJC_FINALIZATION_SAFE_CLASSES skip the stomp.
646 if (gc_finalization_safe_classes && NXMapGet(gc_finalization_safe_classes, cls->name)) {
647 // malloc_printf("&&& finalized safe instance of %s &&&\n", cls->name);
650 if (IsaStompBits & 0x8)
651 printf("adding [%d] %p %s\n", FTCount, dead, dead->IsA->name);
652 addPointerFT(dead, dead->IsA);
653 objc_assign_ivar(_NSDeallocatedObject, dead, 0);
// Collector 'batch_invalidate' callback: drive -finalize over a whole
// batch of garbage objects through the supplied iterator.
657 static void batchFinalize(auto_zone_t *zone,
658 auto_zone_foreach_object_t foreach,
659 auto_zone_cursor_t cursor)
661 gc_zone_finalizing = YES;
664 // eventually foreach(cursor, objc_msgSend, @selector(finalize));
665 // foreach(cursor, finalizeOneObject, NULL);
666 foreach(cursor, objc_msgSend, @selector(finalize));
667 // non-exceptional return means finalization is complete.
669 } @catch (id exception) {
670 _objc_inform("GC: -finalize resulted in an exception being thrown %p!", exception);
673 gc_zone_finalizing = NO;
// Zombie class installed by the collector's 'resurrect' callback:
// instances log every message, preserving the original class for the report.
676 @interface NSResurrectedObject {
678 Class _isa; // [NSResurrectedObject class]
679 Class _old_isa; // original class
680 unsigned _resurrections; // how many times this object has been resurrected.
685 @implementation NSResurrectedObject
686 + (Class)class { return self; }
687 - (Class)class { return _isa; }
// Every message lands in forward:: and is logged with the original class name.
688 + forward:(SEL)aSelector :(marg_list)args { return nil; }
689 - forward:(SEL)aSelector :(marg_list)args {
690 _objc_inform("**resurrected** object %p of class %s being sent message '%s'\n", self, _old_isa->name, sel_getName(aSelector));
694 _objc_inform("**resurrected** object %p of class %s being finalized\n", self, _old_isa->name);
698 static Class _NSResurrectedObject;
// Collector 'resurrect' callback: convert ptr into an NSResurrectedObject
// (first resurrection) or bump its resurrection count (repeat offenders).
700 static void resurrectZombie(auto_zone_t *zone, void *ptr) {
701 NSResurrectedObject *zombie = (NSResurrectedObject*) ptr;
702 if (zombie->_isa != _NSResurrectedObject) {
703 Class old_isa = zombie->_isa;
704 zombie->_isa = _NSResurrectedObject;
705 zombie->_old_isa = old_isa;
706 zombie->_resurrections = 1;
708 zombie->_resurrections++;
712 /***********************************************************************
713 * Allocation recording
714 * For development purposes.
715 **********************************************************************/
// Per-class allocation counts, keyed by Class pointer; guarded by a mutex.
717 static NXMapTable *the_histogram = NULL;
718 static pthread_mutex_t the_histogram_lock = PTHREAD_MUTEX_INITIALIZER;
// Increment the allocation count for 'cls' (counts stored as uintptr values).
721 static void record_allocation(Class cls)
723 pthread_mutex_lock(&the_histogram_lock);
724 unsigned long count = (unsigned long) NXMapGet(the_histogram, cls);
725 NXMapInsert(the_histogram, cls, (const void*) (count + 1));
726 pthread_mutex_unlock(&the_histogram_lock);
// Dump the histogram to stdout as a compilable C initializer.
730 void objc_allocation_histogram(void)
734 NXMapState state = NXInitMapState(the_histogram);
735 printf("struct histogram {\n\tconst char* name;\n\tunsigned long instance_size;\n\tunsigned long count;\n} the_histogram[] = {\n");
736 while (NXNextMapState(the_histogram, &state, (const void**) &cls, (const void**) &count)) {
737 printf("\t{ \"%s\", %lu, %lu },\n", cls->name, (unsigned long) cls->instance_size, count);
// Debug-name helper defined later; this wrapper matches the collector's
// name_for_address callback signature (no retain count in the name).
742 static char *name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset, int withRetainCount);
744 static char* objc_name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset)
746 return name_for_address(zone, base, offset, false);
749 /***********************************************************************
751 **********************************************************************/
753 // Always called by _objcInit, even if GC is off.
// One-time GC setup: creates the zone, wires allocation recording and
// the finalization-safe class list from environment variables.
754 __private_extern__ void gc_init(BOOL on)
759 _objc_inform("GC: is %s", on ? "ON" : "OFF");
763 // Set up the GC zone
764 gc_zone = gc_zone_init();
766 // no NSObject until Foundation calls objc_collect_init()
767 _NSObject_finalize = &_objc_msgForward;
769 // Set up allocation recording
770 RecordAllocations = (getenv("OBJC_RECORD_ALLOCATIONS") != NULL);
771 if (RecordAllocations) the_histogram = NXCreateMapTable(NXPtrValueMapPrototype, 1024);
// Load the newline-separated class-name file naming classes whose
// instances may be finalized without the isa-stomp hack.
773 if (getenv("OBJC_FINALIZATION_SAFE_CLASSES")) {
774 FILE *f = fopen(getenv("OBJC_FINALIZATION_SAFE_CLASSES"), "r");
778 gc_finalization_safe_classes = NXCreateMapTable(NXStrValueMapPrototype, 17);
779 while ((line = fgetln(f, &length)) != NULL) {
780 char *last = &line[length - 1];
781 if (*last == '\n') *last = '\0'; // strip off trailing newline.
// strdup because fgetln's buffer is reused; the map owns the copy.
782 char *className = strdup(line);
783 NXMapInsert(gc_finalization_safe_classes, className, className);
789 auto_zone_start_monitor(false);
790 auto_zone_set_class_list(objc_getClassList);
// Create the collected zone and install all runtime callbacks and
// environment-driven tuning into its control block.
795 static auto_zone_t *gc_zone_init(void)
799 // result = auto_zone_create("objc auto collected zone");
800 result = auto_zone(); // honor existing entry point for now (fixme)
802 auto_collection_control_t *control = auto_collection_parameters(result);
804 // set up the magic control parameters
805 control->invalidate = sendFinalize;
806 control->batch_invalidate = batchFinalize;
807 control->resurrect = resurrectZombie;
808 control->name_for_address = objc_name_for_address;
810 // don't collect "on-demand" until... all Cocoa allocations are outside locks
811 control->should_collect = objc_never_collect;
812 control->ask_should_collect_frequency = UINT_MAX;
813 control->trace_stack_conservatively = YES;
815 // No interruption callback yet. Foundation will install one later.
816 control->collection_should_interrupt = NULL;
818 // debug: if set, only do full generational; sometimes useful for bringup
819 control->disable_generational = getenv("AUTO_DISABLE_GENERATIONAL") != NULL;
821 // debug: always compare generational GC result to full GC garbage list
822 // this *can* catch missing write-barriers and other bugs
823 control->paranoid_generational = (getenv("AUTO_PARANOID_GENERATIONAL") != NULL);
825 // if set take a slightly slower path for object allocation
826 control->malloc_stack_logging = (getenv("MallocStackLogging") != NULL || getenv("MallocStackLoggingNoCompact") != NULL);
828 // logging level: none by default
830 if (getenv("AUTO_LOG_NOISY")) control->log |= AUTO_LOG_COLLECTIONS;
831 if (getenv("AUTO_LOG_ALL")) control->log |= AUTO_LOG_ALL;
832 if (getenv("AUTO_LOG_COLLECTIONS")) control->log |= AUTO_LOG_COLLECTIONS;
833 if (getenv("AUTO_LOG_COLLECT_DECISION")) control->log |= AUTO_LOG_COLLECT_DECISION;
834 if (getenv("AUTO_LOG_GC_IMPL")) control->log |= AUTO_LOG_GC_IMPL;
835 if (getenv("AUTO_LOG_REGIONS")) control->log |= AUTO_LOG_REGIONS;
836 if (getenv("AUTO_LOG_UNUSUAL")) control->log |= AUTO_LOG_UNUSUAL;
837 if (getenv("AUTO_LOG_WEAK")) control->log |= AUTO_LOG_WEAK;
// base-0 strtol: accepts decimal, octal, or hex isa-stomp mask.
839 if (getenv("OBJC_ISA_STOMP")) {
841 // 0x1, just stomp, no messages
842 // 0x2, log messaging after reclaim (break on objc_stale())
843 // 0x4, log messages sent during finalize
844 // 0x8, log all finalizations
845 IsaStompBits = strtol(getenv("OBJC_ISA_STOMP"), NULL, 0);
848 if (getenv("OBJC_COLLECTION_THRESHOLD")) {
849 gc_collection_threshold = (size_t) strtoul(getenv("OBJC_COLLECTION_THRESHOLD"), NULL, 0);
852 if (getenv("OBJC_COLLECTION_RATIO")) {
853 gc_collection_ratio = (size_t) strtoul(getenv("OBJC_COLLECTION_RATIO"), NULL, 0);
856 if (getenv("OBJC_EXPLICIT_ROOTS")) gc_roots_retained = NO;
862 // Called by Foundation to install auto's interruption callback.
// Also resolves NSDeallocatedObject / NSResurrectedObject / -finalize
// now that Foundation's classes exist. Returns the zone for Foundation.
863 malloc_zone_t *objc_collect_init(int (*callback)(void))
865 // Find NSObject's finalize method now that Foundation is loaded.
866 // fixme only look for the base implementation, not a category's
867 _NSDeallocatedObject = objc_getClass("NSDeallocatedObject");
868 _NSResurrectedObject = objc_getClass("NSResurrectedObject");
870 class_lookupMethod(objc_getClass("NSObject"), @selector(finalize));
871 if (_NSObject_finalize == &_objc_msgForward) {
872 _objc_fatal("GC: -[NSObject finalize] unimplemented!");
875 // Don't install the callback if OBJC_DISABLE_COLLECTION_INTERRUPT is set
876 if (gc_zone && getenv("OBJC_DISABLE_COLLECTION_INTERRUPT") == NULL) {
877 auto_collection_control_t *ctrl = auto_collection_parameters(gc_zone);
878 ctrl->collection_should_interrupt = callback;
881 return (malloc_zone_t *)gc_zone;
889 /***********************************************************************
891 **********************************************************************/
893 /* This is non-deadlocking with respect to malloc's locks EXCEPT:
894 * %ls, %a, %A formats
897 static void objc_debug_printf(const char *format, ...)
900 va_start(ap, format);
901 vfprintf(stderr, format, ap);
// Lazily-created side zone for debug strings, so debug allocation
// never touches the collected zone.
905 static malloc_zone_t *objc_debug_zone(void)
907 static malloc_zone_t *z = NULL;
909 z = malloc_create_zone(4096, 0);
910 malloc_set_zone_name(z, "objc-auto debug");
// Recursively write 'value' in the given base at 'head'; allocation-free
// alternative to sprintf for use inside the collector.
915 static char *_malloc_append_unsigned(unsigned value, unsigned base, char *head) {
919 if (value >= base) head = _malloc_append_unsigned(value / base, base, head);
920 value = value % base;
921 head[0] = (value < 10) ? '0' + value : 'a' + value - 10;
// Append 'value' to str in decimal.
926 static void strcati(char *str, unsigned value)
928 str = _malloc_append_unsigned(value, 10, str + strlen(str));
// Append 'value' to str in hex.
932 static void strcatx(char *str, unsigned value)
934 str = _malloc_append_unsigned(value, 16, str + strlen(str));
// Best-effort: find the ivar of 'cls' (searching superclasses first)
// whose offset is closest to, without exceeding, 'offset'.
939 static Ivar ivar_for_offset(struct objc_class *cls, vm_address_t offset)
944 struct objc_ivar_list *ivars;
946 if (!cls) return NULL;
948 // scan base classes FIRST
949 super_ivar = ivar_for_offset(cls->super_class, offset);
950 // result is best-effort; our ivars may be closer
953 // If we have no ivars, return super's ivar
954 if (!ivars || ivars->ivar_count == 0) return super_ivar;
956 // Try our first ivar. If it's too big, use super's best ivar.
957 ivar_offset = ivars->ivar_list[0].ivar_offset;
958 if (ivar_offset > offset) return super_ivar;
959 else if (ivar_offset == offset) return &ivars->ivar_list[0];
961 // Try our other ivars. If any is too big, use the previous.
962 for (i = 1; i < ivars->ivar_count; i++) {
963 int ivar_offset = ivars->ivar_list[i].ivar_offset;
964 if (ivar_offset == offset) {
965 return &ivars->ivar_list[i];
966 } else if (ivar_offset > offset) {
967 return &ivars->ivar_list[i-1];
971 // Found nothing. Return our last ivar.
972 return &ivars->ivar_list[ivars->ivar_count - 1];
// Append a human-readable description of byte `offset` within an
// instance of `cls` to buf: the ivar name (via ivar_for_offset) plus a
// "+N" remainder, or "<extra>+N" when the offset is past instance_size.
// Offset 0 (the isa slot) appends nothing.
static void append_ivar_at_offset(char *buf, struct objc_class *cls, vm_address_t offset)
    if (offset == 0) return; // don't bother with isa
    if (offset >= cls->instance_size) {
        // past the declared instance size: not a real ivar
        strcat(buf, ".<extra>+");
        strcati(buf, offset);

    ivar = ivar_for_offset(cls, offset);

    // fixme doesn't handle structs etc.
    if (ivar->ivar_name) strcat(buf, ivar->ivar_name);
    else strcat(buf, "<anonymous ivar>");

    // remaining byte offset inside the chosen ivar
    offset -= ivar->ivar_offset;
    strcati(buf, offset);
// Return the CoreFoundation class name for `cfobj` without linking
// against CF: resolve CFGetTypeID and _CFRuntimeGetClassWithTypeID at
// runtime through the (legacy) dyld symbol-lookup API, then read the
// className field out of the CF runtime class.  Falls back to a generic
// placeholder name when any lookup fails.
static const char *cf_class_for_object(void *cfobj)
    // ick - we don't link against CF anymore

    const char *className;
    // don't care about the rest

    uint32_t (*CFGetTypeID)(void *);
    void * (*_CFRuntimeGetClassWithTypeID)(uint32_t);

    sym = NSLookupAndBindSymbolWithHint("_CFGetTypeID", "CoreFoundation");
    if (!sym) return "anonymous_NSCFType";
    CFGetTypeID = NSAddressOfSymbol(sym);
    if (!CFGetTypeID) return "NSCFType";

    sym = NSLookupAndBindSymbolWithHint("__CFRuntimeGetClassWithTypeID", "CoreFoundation");
    if (!sym) return "anonymous_NSCFType";
    _CFRuntimeGetClassWithTypeID = NSAddressOfSymbol(sym);
    if (!_CFRuntimeGetClassWithTypeID) return "anonymous_NSCFType";

    // object -> type ID -> CF runtime class -> its name
    cfid = (*CFGetTypeID)(cfobj);
    cfcls = (*_CFRuntimeGetClassWithTypeID)(cfid);
    return cfcls->className;
// Build a human-readable description of the auto-zone block containing
// `base`: class name and ivar at `offset` for ObjC objects (resolving
// NSCFType to the real CF class), or a generic tag for other block
// kinds, optionally suffixed with "[[refcount=N]]".
// Returns a string allocated from objc_debug_zone(); the caller frees
// it with malloc_zone_free(objc_debug_zone(), ...).
static char *name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset, int withRetainCount)
#define APPEND_SIZE(s) \
    auto_zone_size_no_lock(zone, (void *)base);
    auto_memory_type_t type = size ?
        auto_zone_get_layout_type_no_lock(zone, (void *)base) : AUTO_TYPE_UNKNOWN;
    unsigned int refcount = size ?
        auto_zone_retain_count_no_lock(zone, (void *)base) : 0;

    case AUTO_OBJECT_SCANNED:
    case AUTO_OBJECT_UNSCANNED: {
        // ObjC object: class name, CF class if the isa says NSCFType,
        // then the ivar spanning `offset`
        Class cls = *(struct objc_class **)base;
        if (0 == strcmp(cls->name, "NSCFType")) {
            strcat(buf, cf_class_for_object((void *)base));
            strcat(buf, cls->name);
        append_ivar_at_offset(buf, cls, offset);

    case AUTO_MEMORY_SCANNED:
        strcat(buf, "{conservative-block}");

    case AUTO_MEMORY_UNSCANNED:
        strcat(buf, "{no-pointers-block}");

        strcat(buf, "{unallocated-or-stack}");

    if (withRetainCount && refcount > 0) {
        strcat(buf, " [[refcount=");
        strcati(buf, refcount);

    // hand back a heap copy from the debug zone
    result = malloc_zone_malloc(objc_debug_zone(), 1 + strlen(buf));
    strcpy(result, buf);
// Context threaded through the malloc enumerator when listing instances
// of one class (see objc_enumerate_class below).
struct objc_class_recorder_context {
    malloc_zone_t *zone;

// malloc pointer enumerator callback: for each in-use auto-zone block
// that is an ObjC object, walk its class hierarchy; if it is an
// instance of ctx->cls (or a subclass), print its address and class
// name, with the retain count appended when nonzero.
static void objc_class_recorder(task_t task, void *context, unsigned type_mask,
                                vm_range_t *ranges, unsigned range_count)
    struct objc_class_recorder_context *ctx =
        (struct objc_class_recorder_context *)context;

    for (r = ranges, end = ranges + range_count; r < end; r++) {
        auto_memory_type_t type =
            auto_zone_get_layout_type_no_lock(ctx->zone, (void *)r->address);
        if (type == AUTO_OBJECT_SCANNED || type == AUTO_OBJECT_UNSCANNED) {
            // Check if this is an instance of class ctx->cls or some subclass
            Class isa = *(Class *)r->address;
            for (cls = isa; cls; cls = cls->super_class) {
                if (cls == ctx->cls) {
                    objc_debug_printf("[%p] : %s", r->address, isa->name);
                    if ((rc = auto_zone_retain_count_no_lock(ctx->zone, (void *)r->address))) {
                        objc_debug_printf(" [[refcount %u]]", rc);
                    objc_debug_printf("\n");
// Print every live instance of class `clsname` (including instances of
// subclasses) in the auto zone, by driving the zone's pointer
// enumerator with objc_class_recorder.
void objc_enumerate_class(char *clsname)
    struct objc_class_recorder_context ctx;
    ctx.zone = auto_zone();
    ctx.clsname = clsname;
    ctx.cls = objc_getClass(clsname); // GrP fixme may deadlock if classHash lock is already owned
    objc_debug_printf("No class '%s'\n", clsname);
    objc_debug_printf("\n\nINSTANCES OF CLASS '%s':\n\n", clsname);
    (*ctx.zone->introspect->enumerator)(mach_task_self(), &ctx, MALLOC_PTR_IN_USE_RANGE_TYPE, (vm_address_t)ctx.zone, NULL, objc_class_recorder);
    objc_debug_printf("\n%d instances\n\n", ctx.count);
1151 static void objc_reference_printer(auto_zone_t *zone, void *ctx,
1152 auto_reference_t ref)
1154 char *referrer_name = name_for_address(zone, ref.referrer_base, ref.referrer_offset, true);
1155 char *referent_name = name_for_address(zone, ref.referent, 0, true);
1157 objc_debug_printf("[%p%+d -> %p] : %s -> %s\n",
1158 ref.referrer_base, ref.referrer_offset, ref.referent,
1159 referrer_name, referent_name);
1161 malloc_zone_free(objc_debug_zone(), referrer_name);
1162 malloc_zone_free(objc_debug_zone(), referent_name);
// Print every reference to `referent` found by the collector, one line
// per edge via objc_reference_printer.  `lock` selects between the
// locking and no-lock enumeration entry points.
void objc_print_references(void *referent, void *stack_bottom, int lock)
    auto_enumerate_references(auto_zone(), referent,
                              objc_reference_printer, stack_bottom, NULL);
    auto_enumerate_references_no_lock(auto_zone(), referent,
                                      objc_reference_printer, stack_bottom, NULL);
    vm_address_t address;           // of this object
    int refcount;                   // of this object - nonzero means ROOT
    int depth;                      // number of links away from referent, or -1 (not yet reached)
    auto_reference_t *referrers;    // blocks pointing at this object
    int referrers_allocated;        // capacity of referrers[]
    auto_reference_t back;          // reference from this object back toward the target
    uint32_t ID;                    // Graphic ID for grafflization

    unsigned int allocated;         // capacity of list[]

// Global work queues for the reference-graph walk in
// objc_print_recursive_refs: every blob seen, blobs awaiting tracing,
// and blobs identified as roots.
blob_queue blobs = {NULL, 0, 0};
blob_queue untraced_blobs = {NULL, 0, 0};
blob_queue root_blobs = {NULL, 0, 0};
// Advance a four-frame text spinner on the debug console (each frame
// backspaces over the previous one).
static void spin(void) {
    static char* spinner[] = {"\010\010| ", "\010\010/ ", "\010\010- ", "\010\010\\ "};
    static int spindex = 0;

    objc_debug_printf(spinner[spindex]);
    spindex = (spindex + 1) % 4;   // wrap after the last frame
}
1212 static void enqueue_blob(blob_queue *q, blob *b)
1214 if (q->used == q->allocated) {
1215 q->allocated = q->allocated * 2 + 1;
1216 q->list = malloc_zone_realloc(objc_debug_zone(), q->list, q->allocated * sizeof(blob *));
1218 q->list[q->used++] = b;
// Pop and return the first blob in `q`, shifting the remaining entries
// down one slot.
// NOTE(review): assumes q->used has been decremented before the memmove
// (otherwise this reads one slot past the live entries) — confirm
// against the complete source.
static blob *dequeue_blob(blob_queue *q)
    blob *result = q->list[0];
    memmove(&q->list[0], &q->list[1], q->used * sizeof(blob *));
// Return the blob tracking `addr`, creating a zero-filled one (and
// enqueueing it on the global blobs queue) on first sight.  A new
// blob's refcount comes from the collector, or 1 when the address is
// not an auto-zone block (treat unknown memory as a root).
// Returns NULL for addr == 0.
static blob *blob_for_address(vm_address_t addr)
    blob *b, **bp, **end;

    if (addr == 0) return NULL;

    // already tracked?
    for (bp = blobs.list, end = blobs.list+blobs.used; bp < end; bp++) {
        if (b->address == addr) return b;

    // first sight: create, initialize, and remember it
    b = malloc_zone_calloc(objc_debug_zone(), sizeof(blob), 1);
    b->refcount = auto_zone_size_no_lock(auto_zone(), (void *)addr) ? auto_zone_retain_count_no_lock(auto_zone(), (void *)addr) : 1;
    enqueue_blob(&blobs, b);
// Return 1 if a blob for `addr` is already present in the global blobs
// queue (linear search), 0 otherwise.
static int blob_exists(vm_address_t addr)
    blob *b, **bp, **end;
    for (bp = blobs.list, end = blobs.list+blobs.used; bp < end; bp++) {
        if (b->address == addr) return 1;
// Destroy the blobs table and all blob data in it:
// free each blob, then the list array itself.
static void free_blobs(void)
    blob *b, **bp, **end;
    for (bp = blobs.list, end = blobs.list+blobs.used; bp < end; bp++) {
        malloc_zone_free(objc_debug_zone(), b);
    if (blobs.list) malloc_zone_free(objc_debug_zone(), blobs.list);
// Print the back-chain from `root` toward the search target: follow
// each blob's `back` reference, printing one line per hop.  Names are
// freed back to the debug zone after each print.
static void print_chain(auto_zone_t *zone, blob *root)
    for (b = root; b != NULL; b = blob_for_address(b->back.referent)) {
        if (b->back.referent) {
            // intermediate hop: show the referring offset and an arrow
            name = name_for_address(zone, b->address, b->back.referrer_offset, true);
            objc_debug_printf("[%p%+d] : %s ->\n", b->address, b->back.referrer_offset, name);
            // final hop: the target itself, no arrow
            name = name_for_address(zone, b->address, 0, true);
            objc_debug_printf("[%p] : %s\n", b->address, name);
        malloc_zone_free(objc_debug_zone(), name);
// auto_enumerate_references callback used while building the graph:
// record `ref` in the referrer list of the blob being traced (ctx),
// growing the array as needed, and queue the referring block for
// tracing if it has not been seen before.
static void objc_blob_recorder(auto_zone_t *zone, void *ctx,
                               auto_reference_t ref)
    blob *b = (blob *)ctx;

    if (b->referrers_used == b->referrers_allocated) {
        // grow geometrically (2n+1), same policy as enqueue_blob
        b->referrers_allocated = b->referrers_allocated * 2 + 1;
        b->referrers = malloc_zone_realloc(objc_debug_zone(), b->referrers,
                                           b->referrers_allocated *
                                           sizeof(auto_reference_t));

    b->referrers[b->referrers_used++] = ref;
    if (!blob_exists(ref.referrer_base)) {
        enqueue_blob(&untraced_blobs, blob_for_address(ref.referrer_base));
1310 #define INSTANCE_ROOTS 1
1311 #define HEAP_ROOTS 2
1313 static void objc_print_recursive_refs(vm_address_t target, int which, void *stack_bottom, int lock);
1314 static void grafflize(blob_queue *blobs, int everything);
// Print the shortest reference chain from each ObjC-instance root to
// `target` (thin wrapper around objc_print_recursive_refs).
void objc_print_instance_roots(vm_address_t target, void *stack_bottom, int lock)
    objc_print_recursive_refs(target, INSTANCE_ROOTS, stack_bottom, lock);
// Print the shortest reference chain from each heap root (any block
// with positive retain count) to `target`.
void objc_print_heap_roots(vm_address_t target, void *stack_bottom, int lock)
    objc_print_recursive_refs(target, HEAP_ROOTS, stack_bottom, lock);
// Print every block that directly or indirectly references `target`.
void objc_print_all_refs(vm_address_t target, void *stack_bottom, int lock)
    objc_print_recursive_refs(target, ALL_REFS, stack_bottom, lock);
1331 static void sort_blobs_by_refcount(blob_queue *blobs)
1335 // simple bubble sort
1336 for (i = 0; i < blobs->used; i++) {
1337 for (j = i+1; j < blobs->used; j++) {
1338 if (blobs->list[i]->refcount < blobs->list[j]->refcount) {
1339 blob *temp = blobs->list[i];
1340 blobs->list[i] = blobs->list[j];
1341 blobs->list[j] = temp;
1348 static void sort_blobs_by_depth(blob_queue *blobs)
1352 // simple bubble sort
1353 for (i = 0; i < blobs->used; i++) {
1354 for (j = i+1; j < blobs->used; j++) {
1355 if (blobs->list[i]->depth > blobs->list[j]->depth) {
1356 blob *temp = blobs->list[i];
1357 blobs->list[i] = blobs->list[j];
1358 blobs->list[j] = temp;
// Core engine behind objc_print_instance_roots / objc_print_heap_roots /
// objc_print_all_refs: build the graph of blocks that transitively point
// to `target`, find shortest root->target paths with a breadth-first
// walk, print the results, emit a .graffle picture, and reset the
// global blob queues for the next call.
static void objc_print_recursive_refs(vm_address_t target, int which, void *stack_bottom, int lock)
    objc_debug_printf("\n "); // make spinner draw in a pretty place

    // Construct pointed-to graph (of things eventually pointing to target)
    enqueue_blob(&untraced_blobs, blob_for_address(target));
    while (untraced_blobs.used > 0) {
        blob *b = dequeue_blob(&untraced_blobs);
        auto_enumerate_references(auto_zone(), (void *)b->address,
                                  objc_blob_recorder, stack_bottom, b);
        auto_enumerate_references_no_lock(auto_zone(), (void *)b->address,
                                          objc_blob_recorder, stack_bottom, b);

    // Walk pointed-to graph to find shortest paths from roots to target.
    // This is BREADTH-FIRST order.
    blob_for_address(target)->depth = 0;
    enqueue_blob(&untraced_blobs, blob_for_address(target));
    while (untraced_blobs.used > 0) {
        blob *b = dequeue_blob(&untraced_blobs);
        auto_reference_t *r, *end;
        if (which == ALL_REFS) {
            // Never stop at roots.
        } else if (which == HEAP_ROOTS) {
            // Stop at any root (a block with positive retain count)
            stop = (b->refcount > 0);
        } else if (which == INSTANCE_ROOTS) {
            // Only stop at roots that are instances
            auto_memory_type_t type = auto_zone_get_layout_type_no_lock(auto_zone(), (void *)b->address);
            stop = (b->refcount > 0 && (type == AUTO_OBJECT_SCANNED || type == AUTO_OBJECT_UNSCANNED)); // GREG XXX ???

        // If this object is a root, save it and don't walk its referrers.
            enqueue_blob(&root_blobs, b);

        // For any "other object" that points to "this object"
        // and does not yet have a depth:
        // (1) other object is one level deeper than this object
        // (2) (one of) the shortest path(s) from other object to the
        //     target goes through this object
        for (r = b->referrers, end = b->referrers + b->referrers_used;
            other = blob_for_address(r->referrer_base);
            // NOTE(review): blob_for_address() can return NULL (for a zero
            // referrer_base) — verify `other` is guaranteed non-NULL here.
            if (other->depth == -1) {
                other->depth = b->depth + 1;
                enqueue_blob(&untraced_blobs, other);

    // Announce what was found before dumping it.
    char *name = name_for_address(auto_zone(), target, 0, true);
    objc_debug_printf("\n\n%d %s %p (%s)\n\n",
                      (which==ALL_REFS) ? blobs.used : root_blobs.used,
                      (which==ALL_REFS) ? "INDIRECT REFS TO" : "ROOTS OF",
    malloc_zone_free(objc_debug_zone(), name);

    if (which == ALL_REFS) {
        // Print all reference objects, biggest refcount first
        sort_blobs_by_refcount(&blobs);
        for (i = 0; i < blobs.used; i++) {
            char *name = name_for_address(auto_zone(), blobs.list[i]->address, 0, true);
            objc_debug_printf("[%p] : %s\n", blobs.list[i]->address, name);
            malloc_zone_free(objc_debug_zone(), name);

        // Walk back chain from every root to the target, printing every step.
        while (root_blobs.used > 0) {
            blob *root = dequeue_blob(&root_blobs);
            print_chain(auto_zone(), root);
            objc_debug_printf("\n");

    grafflize(&blobs, which == ALL_REFS);

    objc_debug_printf("\ndone\n\n");

    // Tear down the graph and reset the global queues.
    if (untraced_blobs.list) malloc_zone_free(objc_debug_zone(), untraced_blobs.list);
    if (root_blobs.list) malloc_zone_free(objc_debug_zone(), root_blobs.list);
    memset(&blobs, 0, sizeof(blobs));
    memset(&root_blobs, 0, sizeof(root_blobs));
    memset(&untraced_blobs, 0, sizeof(untraced_blobs));
// Context threaded through the malloc enumerator by
// objc_dump_block_list: the auto zone and the output file descriptor.
struct objc_block_recorder_context {
    malloc_zone_t *zone;

// malloc pointer enumerator callback: write "0xADDRESS name\n" to
// ctx->fd for every in-use block, using name_for_address for the
// symbolic part.
static void objc_block_recorder(task_t task, void *context, unsigned type_mask,
                                vm_range_t *ranges, unsigned range_count)
    struct objc_block_recorder_context *ctx =
        (struct objc_block_recorder_context *)context;

    for (r = ranges, end = ranges + range_count; r < end; r++) {
        char *name = name_for_address(ctx->zone, r->address, 0, true);
        strcatx(buf, r->address);
        write(ctx->fd, "0x", 2);
        write(ctx->fd, buf, strlen(buf));
        write(ctx->fd, " ", 1);
        write(ctx->fd, name, strlen(name));
        write(ctx->fd, "\n", 1);
        malloc_zone_free(objc_debug_zone(), name);
// Dump "0xADDRESS name" for every auto-allocated block to `path`, or
// to a fresh /tmp/blocks-XXXXX.txt when path is NULL, and announce the
// file on the debug console.
void objc_dump_block_list(const char* path)
    struct objc_block_recorder_context ctx;
    char filename[] = "/tmp/blocks-XXXXX.txt";

    ctx.zone = auto_zone();
    // open the caller's path, or make a unique temp file keeping the ".txt" suffix
    ctx.fd = (path ? open(path, O_WRONLY | O_CREAT | O_TRUNC, 0666) : mkstemps(filename, strlen(strrchr(filename, '.'))));

    objc_debug_printf("\n\nALL AUTO-ALLOCATED BLOCKS\n\n");
    (*ctx.zone->introspect->enumerator)(mach_task_self(), &ctx, MALLOC_PTR_IN_USE_RANGE_TYPE, (vm_address_t)ctx.zone, NULL, objc_block_recorder);
    objc_debug_printf("%d blocks written to file\n", ctx.count);
    objc_debug_printf("open %s\n", (path ? path : filename));
// Emit an OmniGraffle "<key>ID</key><integer>N</integer>" fragment for
// graphic ID `ID` to the file descriptor `gfile`.
// NOTE(review): buf is expected to hold the decimal digits of ID, and c
// the closing tag, by the later writes — confirm against the full source.
static void grafflize_id(int gfile, int ID)
    c = "<key>ID</key><integer>";
    write(gfile, c, strlen(c));
    write(gfile, buf, strlen(buf));
    write(gfile, c, strlen(c));
// head = REFERENT end = arrow
// tail = REFERRER end = no arrow
// Emit one OmniGraffle LineGraphic for `reference`, connecting the
// referrer's box to the referent's box.  `important` edges (the
// back-references toward the target) get a thick 3pt stroke.
static void grafflize_reference(int gfile, auto_reference_t reference,
                                int ID, int important)
    blob *referrer = blob_for_address(reference.referrer_base);
    blob *referent = blob_for_address(reference.referent);

    // open the line graphic
    c = "<dict><key>Class</key><string>LineGraphic</string>";
    write(gfile, c, strlen(c));

    grafflize_id(gfile, ID);

    // head (arrow end) attaches to the referent's graphic
    c = "<key>Head</key><dict>";
    write(gfile, c, strlen(c));
    grafflize_id(gfile, referent->ID);
    write(gfile, c, strlen(c));

    // tail (no-arrow end) attaches to the referrer's graphic
    c = "<key>Tail</key><dict>";
    write(gfile, c, strlen(c));
    grafflize_id(gfile, referrer->ID);
    write(gfile, c, strlen(c));

    // style - head arrow, thick line if important
    c = "<key>Style</key><dict><key>stroke</key><dict>"
        "<key>HeadArrow</key><string>FilledArrow</string>"
        "<key>LineType</key><integer>1</integer>";
    write(gfile, c, strlen(c));
    c = "<key>Width</key><real>3</real>";
    write(gfile, c, strlen(c));
    c = "</dict></dict>";
    write(gfile, c, strlen(c));

    write(gfile, c, strlen(c));
// Emit one OmniGraffle ShapedGraphic (rectangle) for blob `b`:
// positioned vertically by BFS depth, sized to fit the block's name,
// labeled (RTF) with the name and hex address, and drawn with a fat
// border when the block is a root (refcount > 0).
static void grafflize_blob(int gfile, blob *b)
    // fixme include ivar names too
    char *name = name_for_address(auto_zone(), b->address, 0, false);
    int width = 30 + strlen(name)*6;

        "<key>Class</key><string>ShapedGraphic</string>"
        "<key>Shape</key><string>Rectangle</string>";
    write(gfile, c, strlen(c));

    grafflize_id(gfile, b->ID);

    // order vertically by depth
    c = "<key>Bounds</key><string>{{0,";
    write(gfile, c, strlen(c));
    strcati(buf, b->depth*60);
    write(gfile, buf, strlen(buf));
    write(gfile, c, strlen(c));
    strcati(buf, width);
    strcati(buf, height);
    write(gfile, buf, strlen(buf));
    write(gfile, c, strlen(c));

    // label: RTF body with the name, then "\<newline>0xADDRESS"
    c = "<key>Text</key><dict><key>Text</key>"
        "<string>{\\rtf1\\mac\\ansicpg10000\\cocoartf102\n"
        "{\\fonttbl\\f0\\fswiss\\fcharset77 Helvetica;\\fonttbl\\f1\\fswiss\\fcharset77 Helvetica-Bold;}\n"
        "{\\colortbl;\\red255\\green255\\blue255;}\n"
        "\\pard\\tx560\\tx1120\\tx1680\\tx2240\\tx3360\\tx3920\\tx4480\\tx5040\\tx5600\\tx6160\\tx6720\\qc\n"
        "\\f0\\fs20 \\cf0 ";
    write(gfile, c, strlen(c));
    write(gfile, name, strlen(name));
    strcpy(buf, "\\\n0x");
    strcatx(buf, b->address);
    write(gfile, buf, strlen(buf));
    c = "}</string></dict>";
    write(gfile, c, strlen(c));

    // style block
    c = "<key>Style</key><dict>";
    write(gfile, c, strlen(c));

    // no shadow
    c = "<key>shadow</key><dict><key>Draws</key><string>NO</string></dict>";
    write(gfile, c, strlen(c));

    // fat border if refcount > 0
    if (b->refcount > 0) {
        c = "<key>stroke</key><dict><key>Width</key><real>4</real></dict>";
        write(gfile, c, strlen(c));

    write(gfile, c, strlen(c));

    write(gfile, c, strlen(c));

    malloc_zone_free(objc_debug_zone(), name);
1673 #define gheader "<?xml version=\"1.0\" encoding=\"UTF-8\"?><!DOCTYPE plist PUBLIC \"-//Apple Computer//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\"><plist version=\"1.0\"><dict><key>GraphDocumentVersion</key><integer>3</integer><key>ReadOnly</key><string>NO</string><key>GraphicsList</key><array>\n"
1675 #define gfooter "</array></dict></plist>\n"
// Write the whole blob graph as an OmniGraffle document in /tmp: one
// rectangle per blob (all of them when `everything`, otherwise only
// blobs reached by the BFS, depth >= 0), one arrow per reference, with
// back-references toward the target drawn thick.
static void grafflize(blob_queue *blobs, int everything)
    // Don't require linking to Foundation!

    char filename[] = "/tmp/gc-XXXXX.graffle";

    gfile = mkstemps(filename, strlen(strrchr(filename, '.')));
    objc_debug_printf("couldn't create a graffle file in /tmp/ (errno %d)\n", errno);

    write(gfile, gheader, strlen(gheader));

    // Write a rectangle for each blob
    sort_blobs_by_depth(blobs);
    for (i = 0; i < blobs->used; i++) {
        blob *b = blobs->list[i];
        if (everything || b->depth >= 0) {
            grafflize_blob(gfile, b);

    for (i = 0; i < blobs->used; i++) {
        blob *b = blobs->list[i];

        // Write an arrow for each reference
        // Use big arrows for backreferences
        for (j = 0; j < b->referrers_used; j++) {
            // BUG(review): this line indexes referrers with the OUTER loop
            // variable `i`; the loop variable here is `j`, and the call two
            // lines below uses b->referrers[j].  Almost certainly should be
            // b->referrers[j] in all three comparisons.
            int is_back_ref = (b->referrers[i].referent == b->back.referent && b->referrers[i].referrer_offset == b->back.referrer_offset && b->referrers[i].referrer_base == b->back.referrer_base);
            grafflize_reference(gfile, b->referrers[j], nextid++,

        // Write an arrow for each backreference
        grafflize_reference(gfile, b->back, nextid++, false);

    // Write footer and close
    write(gfile, gfooter, strlen(gfooter));
    objc_debug_printf("wrote object graph (%d objects)\nopen %s\n",
                      blobs->used, filename);
// Stubs for non-open-source libauto functions
// Minimal stand-ins so this file builds and links when the real
// collector library is not available.

static void auto_collect(auto_zone_t *zone, auto_collection_mode_t mode, void *collection_context)

static auto_collection_control_t *auto_collection_parameters(auto_zone_t *zone)

static const auto_statistics_t *auto_collection_statistics(auto_zone_t *zone)

static void auto_enumerate_references(auto_zone_t *zone, void *referent,
                                      auto_reference_recorder_t callback,
                                      void *stack_bottom, void *ctx)

static void auto_enumerate_references_no_lock(auto_zone_t *zone, void *referent, auto_reference_recorder_t callback, void *stack_bottom, void *ctx)

static auto_zone_t *auto_zone(void)

static void auto_zone_add_root(auto_zone_t *zone, void *root, size_t size)

static void* auto_zone_allocate_object(auto_zone_t *zone, size_t size, auto_memory_type_t type, boolean_t initial_refcount_to_one, boolean_t clear)

static const void *auto_zone_base_pointer(auto_zone_t *zone, const void *ptr)

static auto_memory_type_t auto_zone_get_layout_type(auto_zone_t *zone, void *ptr)

static auto_memory_type_t auto_zone_get_layout_type_no_lock(auto_zone_t *zone, void *ptr)

static boolean_t auto_zone_is_finalized(auto_zone_t *zone, const void *ptr)

static boolean_t auto_zone_is_valid_pointer(auto_zone_t *zone, const void *ptr)

static unsigned int auto_zone_release(auto_zone_t *zone, void *ptr)

static void auto_zone_retain(auto_zone_t *zone, void *ptr)

static unsigned int auto_zone_retain_count_no_lock(auto_zone_t *zone, const void *ptr)

static void auto_zone_set_class_list(int (*get_class_list)(void **buffer, int count))

static size_t auto_zone_size_no_lock(auto_zone_t *zone, const void *ptr)

static void auto_zone_start_monitor(boolean_t force)

static void auto_zone_write_barrier(auto_zone_t *zone, void *recipient, const unsigned int offset_in_bytes, const void *new_value)
    // plain pointer-sized store; no write-barrier bookkeeping in the stub
    *(uintptr_t *)(offset_in_bytes + (uint8_t *)recipient) = (uintptr_t)new_value;
1836 static void *auto_zone_write_barrier_memmove(auto_zone_t *zone, void *dst, const void *src, size_t size)
1838 return memmove(dst, src, size);