/*
 * Source file: runtime/objc-auto.m from Apple objc4-493.11
 * (mirror: git.saurik.com, apple/objc4.git).
 */
1 /*
2 * Copyright (c) 2004-2007 Apple Inc. All rights reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24 #import "objc-config.h"
25 #import "objc-auto.h"
26 #import "objc-accessors.h"
27
28 #ifndef OBJC_NO_GC
29
30 #import <stdint.h>
31 #import <stdbool.h>
32 #import <fcntl.h>
33 #import <dlfcn.h>
34 #import <mach/mach.h>
35 #import <mach-o/dyld.h>
36 #import <mach-o/nlist.h>
37 #import <sys/types.h>
38 #import <sys/mman.h>
39 #import <libkern/OSAtomic.h>
40 #import <auto_zone.h>
41
42 #import <Block_private.h>
43 #include <dispatch/dispatch.h>
44
45 #import "objc-private.h"
46 #import "objc-references.h"
47 #import "maptable.h"
48 #import "message.h"
49 #import "objc-gdb.h"
50
51 #if !defined(NDEBUG) && !__OBJC2__
52 #import "objc-exception.h"
53 #endif
54
55
/* Forward declarations for GC bootstrap helpers defined later in this file. */
static auto_zone_t *gc_zone_init(BOOL wantsCompaction);
static void gc_block_init(void);
static void registeredClassTableInit(void);
static BOOL objc_isRegisteredClass(Class candidate);

PRIVATE_EXTERN BOOL UseGC = NO;          // YES when the process runs garbage-collected
PRIVATE_EXTERN BOOL UseCompaction = NO;  // YES when heap compaction is enabled
static BOOL WantsMainThreadFinalization = NO;  // set once any class asks to be finalized on the main thread

PRIVATE_EXTERN auto_zone_t *gc_zone = NULL;  // the collected (auto) zone; NULL until GC is initialized

// Pointer magic to make dyld happy. See notes in objc-private.h
PRIVATE_EXTERN id (*objc_assign_ivar_internal)(id, id, ptrdiff_t) = objc_assign_ivar;
69
70
/* Method prototypes */
// Declares the selectors used below (-UTF8String, -description) so the
// compiler has signatures for them; no class of this name ever exists.
@interface DoesNotExist
- (const char *)UTF8String;
- (id)description;
@end
76
77
78 /***********************************************************************
79 * Break-on-error functions
80 **********************************************************************/
81
// Debugger hook: called when objc_assign_ivar targets memory outside the auto zone.
BREAKPOINT_FUNCTION(
    void objc_assign_ivar_error(id base, ptrdiff_t offset)
);

// Debugger hook: called when an already-collected object is stored into a global.
BREAKPOINT_FUNCTION(
    void objc_assign_global_error(id value, id *slot)
);

// Debugger hook: called when a -finalize implementation throws an exception.
BREAKPOINT_FUNCTION(
    void objc_exception_during_finalize_error(void)
);
93
94 /***********************************************************************
95 * Utility exports
96 * Called by various libraries.
97 **********************************************************************/
98
// Legacy spelling of objc_setCollectionThreshold(); kept for old callers.
OBJC_EXPORT void objc_set_collection_threshold(size_t threshold) { // Old naming
    if (!UseGC) return;
    auto_collection_parameters(gc_zone)->collection_threshold = threshold;
}
104
// Set the allocation threshold that triggers a collection (no-op when GC is off).
OBJC_EXPORT void objc_setCollectionThreshold(size_t threshold) {
    if (!UseGC) return;
    auto_collection_parameters(gc_zone)->collection_threshold = threshold;
}
110
// Set the ratio of generational to full collections (no-op when GC is off).
void objc_setCollectionRatio(size_t ratio) {
    if (!UseGC) return;
    auto_collection_parameters(gc_zone)->full_vs_gen_frequency = ratio;
}
116
// Legacy spelling of objc_setCollectionRatio(); kept for old callers.
void objc_set_collection_ratio(size_t ratio) { // old naming
    if (!UseGC) return;
    auto_collection_parameters(gc_zone)->full_vs_gen_frequency = ratio;
}
122
// Record that instances of cls must have -finalize run on the main thread.
// Also flips the global flag so BatchInvalidate routes work accordingly.
void objc_finalizeOnMainThread(Class cls) {
    if (!UseGC) return;
    WantsMainThreadFinalization = YES;
    _class_setFinalizeOnMainThread(cls);
}
129
// stack based data structure queued if/when there is main-thread-only finalization work TBD
typedef struct BatchFinalizeBlock {
    auto_zone_foreach_object_t foreach;  // collector-supplied object enumerator
    auto_zone_cursor_t cursor;           // opaque position within the garbage list
    size_t cursor_size;
    volatile BOOL finished;  // set by the main thread when this batch is done
    volatile BOOL started;   // set by the main thread when it picks up this batch
    struct BatchFinalizeBlock *next;     // singly-linked FIFO maintained by MainThreadWorkQ
} BatchFinalizeBlock_t;
139
// The Main Thread Finalization Work Queue Head
// mutex guards head/tail; condition signals both "work available" and
// "batch finished" (see batchFinalizeOnMainThread / batchFinalizeOnTwoThreads).
static struct {
    pthread_mutex_t mutex;
    pthread_cond_t condition;
    BatchFinalizeBlock_t *head;
    BatchFinalizeBlock_t *tail;
} MainThreadWorkQ;
147
148
// Intentionally empty — presumably retained so existing callers keep
// linking; collector startup is handled elsewhere in the runtime.
void objc_startCollectorThread(void) {
}

// Old-naming twin of objc_startCollectorThread(); also a no-op.
void objc_start_collector_thread(void) {
}

static void batchFinalizeOnMainThread(void);
156
// Request a collection. options selects the collection mode (ratio /
// generational / full / exhaustive) and whether to block until done.
// On the main thread this also drains any pending main-thread-only
// finalization work, both before starting and while waiting.
void objc_collect(unsigned long options) {
    if (!UseGC) return;
    BOOL onMainThread = pthread_main_np() ? YES : NO;

    // while we're here, sneak off and do some finalization work (if any)
    if (onMainThread) batchFinalizeOnMainThread();
    // now on with our normally scheduled programming
    auto_zone_options_t amode = AUTO_ZONE_COLLECT_NO_OPTIONS;
    if (!(options & OBJC_COLLECT_IF_NEEDED)) {
        // low two bits of options encode the requested collection mode
        switch (options & 0x3) {
            case OBJC_RATIO_COLLECTION:        amode = AUTO_ZONE_COLLECT_RATIO_COLLECTION;        break;
            case OBJC_GENERATIONAL_COLLECTION: amode = AUTO_ZONE_COLLECT_GENERATIONAL_COLLECTION; break;
            case OBJC_FULL_COLLECTION:         amode = AUTO_ZONE_COLLECT_FULL_COLLECTION;         break;
            case OBJC_EXHAUSTIVE_COLLECTION:   amode = AUTO_ZONE_COLLECT_EXHAUSTIVE_COLLECTION;   break;
        }
        amode |= AUTO_ZONE_COLLECT_COALESCE;
        amode |= AUTO_ZONE_COLLECT_LOCAL_COLLECTION;
    }
    if (options & OBJC_WAIT_UNTIL_DONE) {
        __block BOOL done = NO;
        // If executing on the main thread, use the main thread work queue condition to block,
        // so main thread finalization can complete. Otherwise, use a thread-local condition.
        pthread_mutex_t localMutex = PTHREAD_MUTEX_INITIALIZER, *mutex = &localMutex;
        pthread_cond_t localCondition = PTHREAD_COND_INITIALIZER, *condition = &localCondition;
        if (onMainThread) {
            mutex = &MainThreadWorkQ.mutex;
            condition = &MainThreadWorkQ.condition;
        }
        pthread_mutex_lock(mutex);
        // completion block runs on a global queue; it signals us through the
        // same mutex/condition pair we are about to wait on
        auto_zone_collect_and_notify(gc_zone, amode, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            pthread_mutex_lock(mutex);
            done = YES;
            pthread_cond_signal(condition);
            pthread_mutex_unlock(mutex);
        });
        while (!done) {
            pthread_cond_wait(condition, mutex);
            // main thread may be woken to service finalization work queued
            // by the collector thread; do it and resume waiting
            if (onMainThread && MainThreadWorkQ.head) {
                pthread_mutex_unlock(mutex);
                batchFinalizeOnMainThread();
                pthread_mutex_lock(mutex);
            }
        }
        pthread_mutex_unlock(mutex);
    } else {
        auto_zone_collect(gc_zone, amode);
    }
}
205
206
207 // USED BY CF & ONE OTHER
// Returns YES when GC is on and object points into the collected zone.
BOOL objc_isAuto(id object)
{
    if (!UseGC) return NO;
    return auto_zone_is_valid_pointer(gc_zone, object) != 0;
}
212
213
// Reports whether the process is running under garbage collection.
BOOL objc_collectingEnabled(void)
{
    return UseGC ? YES : NO;
}
218
// Legacy spelling of objc_collectingEnabled().
BOOL objc_collecting_enabled(void) // Old naming
{
    return UseGC ? YES : NO;
}
223
// Expose the collected zone as a malloc zone (NULL before GC initialization).
malloc_zone_t *objc_collectableZone(void) {
    return gc_zone;
}
227
// Dump the GC heap to a uniquely-numbered file (name built from
// OBJC_HEAP_DUMP_FILENAME_FORMAT, the pid and a per-process counter).
// On success, copies the chosen filename into filenamebuffer (when the
// caller's buffer of `length` bytes is big enough; otherwise stores "").
// Returns YES on success, NO if the heap dump itself failed.
BOOL objc_dumpHeap(char *filenamebuffer, unsigned long length) {
    static int counter = 0;
    ++counter;
    char buffer[1024];
    // snprintf instead of sprintf: bound the write to the local buffer so an
    // oversized format expansion cannot overflow the stack.
    snprintf(buffer, sizeof(buffer), OBJC_HEAP_DUMP_FILENAME_FORMAT, getpid(), counter);
    if (!_objc_dumpHeap(gc_zone, buffer)) return NO;
    if (filenamebuffer) {
        unsigned long blen = strlen(buffer);
        if (blen < length)
            strncpy(filenamebuffer, buffer, blen+1);  // blen+1 includes the NUL
        else if (length > 0)
            filenamebuffer[0] = 0;  // give some answer
    }
    return YES;
}
243
244
245 /***********************************************************************
246 * Memory management.
247 * Called by CF and Foundation.
248 **********************************************************************/
249
// Allocate an object in the GC zone, with the given number of extra bytes.
id objc_allocate_object(Class cls, int extra)
{
    // class_createInstance is GC-aware, so no special handling is needed here.
    return class_createInstance(cls, extra);
}
255
256
257 /***********************************************************************
258 * Write barrier implementations, optimized for when GC is known to be on
259 * Called by the write barrier exports only.
260 * These implementations assume GC is on. The exported function must
261 * either perform the check itself or be conditionally stomped at
262 * startup time.
263 **********************************************************************/
264
// GC strong-cast write barrier. auto_zone_set_write_barrier both stores and
// returns true when slot points into GC memory; otherwise fall back to the
// root write barrier, which always performs the store.
PRIVATE_EXTERN id objc_assign_strongCast_gc(id value, id *slot) {
    if (auto_zone_set_write_barrier(gc_zone, (void*)slot, value)) {
        return value;
    }
    auto_zone_root_write_barrier(gc_zone, slot, value);
    return value;
}
271
// GC global write barrier: registers the slot as an explicit root when the
// value is a GC-managed object; otherwise performs a plain store.
PRIVATE_EXTERN id objc_assign_global_gc(id value, id *slot) {
    // use explicit root registration.
    if (!value || !auto_zone_is_valid_pointer(gc_zone, value)) {
        *slot = value;
        return value;
    }
    if (auto_zone_is_finalized(gc_zone, value)) {
        _objc_inform("GC: storing an already collected object %p into global memory at %p, break on objc_assign_global_error to debug\n", value, slot);
        objc_assign_global_error(value, slot);
    }
    auto_zone_add_root(gc_zone, slot, value);
    return value;
}
286
// GC thread-local write barrier: root-register GC-managed values,
// plain-store everything else.
PRIVATE_EXTERN id objc_assign_threadlocal_gc(id value, id *slot)
{
    if (!value || !auto_zone_is_valid_pointer(gc_zone, value)) {
        *slot = value;
        return value;
    }
    auto_zone_add_root(gc_zone, slot, value);
    return value;
}
298
// GC instance-variable write barrier: stores value at base+offset through
// the zone's write barrier, reporting when base is not a GC allocation.
PRIVATE_EXTERN id objc_assign_ivar_gc(id value, id base, ptrdiff_t offset)
{
    id *slot = (id*) ((char *)base + offset);

    if (!value) {
        *slot = value;
        return value;
    }
    if (!auto_zone_set_write_barrier(gc_zone, (char *)base + offset, value)) {
        _objc_inform("GC: %p + %tu isn't in the auto_zone, break on objc_assign_ivar_error to debug.\n", base, offset);
        objc_assign_ivar_error(base, offset);
    }
    return value;
}
314
// Non-GC strong-cast barrier: a plain store.
PRIVATE_EXTERN id objc_assign_strongCast_non_gc(id value, id *slot) {
    *slot = value;
    return value;
}
318
// Non-GC global barrier: a plain store.
PRIVATE_EXTERN id objc_assign_global_non_gc(id value, id *slot) {
    *slot = value;
    return value;
}
322
// Non-GC thread-local barrier: a plain store.
PRIVATE_EXTERN id objc_assign_threadlocal_non_gc(id value, id *slot) {
    *slot = value;
    return value;
}
326
// Non-GC ivar barrier: compute the slot address and store.
PRIVATE_EXTERN id objc_assign_ivar_non_gc(id value, id base, ptrdiff_t offset) {
    id *slot = (id*) ((char *)base + offset);
    *slot = value;
    return value;
}
331
332 /***********************************************************************
333 * Write barrier exports
334 * Called by pretty much all GC-supporting code.
335 **********************************************************************/
336
337
338 #if defined(__i386__)
339
340 // These 3 functions are defined in objc-auto-i386.s as
341 // the non-GC variants. Under GC, rtp_init stomps them with jumps to
342 // objc_assign_*_gc.
343
344 #else
345
346 // use generic implementation until time can be spent on optimizations
// Public strong-cast barrier: dispatch on the GC flag.
id objc_assign_strongCast(id value, id *dest)
{
    if (!UseGC) return (*dest = value);
    return objc_assign_strongCast_gc(value, dest);
}
355
// Public global barrier: dispatch on the GC flag.
id objc_assign_global(id value, id *dest)
{
    if (!UseGC) return (*dest = value);
    return objc_assign_global_gc(value, dest);
}
364
// Public thread-local barrier: dispatch on the GC flag.
id objc_assign_threadlocal(id value, id *dest)
{
    if (!UseGC) return (*dest = value);
    return objc_assign_threadlocal_gc(value, dest);
}
373
// Public ivar barrier: dispatch on the GC flag.
id objc_assign_ivar(id value, id dest, ptrdiff_t offset)
{
    if (!UseGC) {
        id *slot = (id*) ((char *)dest + offset);
        return (*slot = value);
    }
    return objc_assign_ivar_gc(value, dest, offset);
}
383
384 // not defined(__i386__)
385 #endif
386
// Pick the 32- or 64-bit Mach-O structure and load-command variants once,
// so _objc_update_stubs_in_mach_header below is width-independent.
#if __LP64__
#define LC_SEGMENT_COMMAND LC_SEGMENT_64
#define LC_ROUTINES_COMMAND LC_ROUTINES_64
typedef struct mach_header_64 macho_header;
typedef struct section_64 macho_section;
typedef struct nlist_64 macho_nlist;
typedef struct segment_command_64 macho_segment_command;
#else
#define LC_SEGMENT_COMMAND LC_SEGMENT
#define LC_ROUTINES_COMMAND LC_ROUTINES
typedef struct mach_header macho_header;
typedef struct section macho_section;
typedef struct nlist macho_nlist;
typedef struct segment_command macho_segment_command;
#endif
402
// Patch lazy symbol pointers in a loaded Mach-O image: for each of the named
// `symbols`, overwrite the corresponding lazy-pointer slot with the matching
// entry of `functions`. Walks the image's load commands three times:
// (1) find the slide and __LINKEDIT base, (2) locate the symbol / string /
// indirect-symbol tables, (3) scan lazy-pointer sections and patch matches.
PRIVATE_EXTERN void _objc_update_stubs_in_mach_header(const struct mach_header* mh, uint32_t symbol_count, const char *symbols[], void *functions[]) {
    uint32_t cmd_index, cmd_count = mh->ncmds;
    intptr_t slide = 0;
    const struct load_command* const cmds = (struct load_command*)((char*)mh + sizeof(macho_header));
    const struct load_command* cmd;
    const uint8_t *linkEditBase = NULL;
    const macho_nlist *symbolTable = NULL;
    uint32_t symbolTableCount = 0;
    const char *stringTable = NULL;
    uint32_t stringTableSize = 0;
    const uint32_t *indirectSymbolTable = NULL;
    uint32_t indirectSymbolTableCount = 0;

    // first pass at load commands gets linkEditBase
    for (cmd = cmds, cmd_index = 0; cmd_index < cmd_count; ++cmd_index) {
        if ( cmd->cmd == LC_SEGMENT_COMMAND ) {
            const macho_segment_command* seg = (macho_segment_command*)cmd;
            if ( strcmp(seg->segname,"__TEXT") == 0 )
                // slide = where the image actually loaded minus where it was linked
                slide = (uintptr_t)mh - seg->vmaddr;
            else if ( strcmp(seg->segname,"__LINKEDIT") == 0 )
                linkEditBase = (uint8_t*)(seg->vmaddr + slide - seg->fileoff);
        }
        cmd = (const struct load_command*)(((char*)cmd)+cmd->cmdsize);
    }

    // second pass: symbol, string and indirect-symbol tables (offsets are
    // relative to linkEditBase found above)
    for (cmd = cmds, cmd_index = 0; cmd_index < cmd_count; ++cmd_index) {
        switch ( cmd->cmd ) {
        case LC_SYMTAB:
            {
                const struct symtab_command* symtab = (struct symtab_command*)cmd;
                symbolTableCount = symtab->nsyms;
                symbolTable = (macho_nlist*)(&linkEditBase[symtab->symoff]);
                stringTableSize = symtab->strsize;
                stringTable = (const char*)&linkEditBase[symtab->stroff];
            }
            break;
        case LC_DYSYMTAB:
            {
                const struct dysymtab_command* dsymtab = (struct dysymtab_command*)cmd;
                indirectSymbolTableCount = dsymtab->nindirectsyms;
                indirectSymbolTable = (uint32_t*)(&linkEditBase[dsymtab->indirectsymoff]);
            }
            break;
        }
        cmd = (const struct load_command*)(((char*)cmd)+cmd->cmdsize);
    }

    // walk sections to find one with this lazy pointer
    for (cmd = cmds, cmd_index = 0; cmd_index < cmd_count; ++cmd_index) {
        if (cmd->cmd == LC_SEGMENT_COMMAND) {
            const macho_segment_command* seg = (macho_segment_command*)cmd;
            const macho_section* const sectionsStart = (macho_section*)((char*)seg + sizeof(macho_segment_command));
            const macho_section* const sectionsEnd = &sectionsStart[seg->nsects];
            const macho_section* sect;
            for (sect = sectionsStart; sect < sectionsEnd; ++sect) {
                const uint8_t type = sect->flags & SECTION_TYPE;
                if (type == S_LAZY_DYLIB_SYMBOL_POINTERS || type == S_LAZY_SYMBOL_POINTERS) { // S_LAZY_DYLIB_SYMBOL_POINTERS
                    uint32_t pointer_index, pointer_count = (uint32_t)(sect->size / sizeof(uintptr_t));
                    uintptr_t* const symbolPointers = (uintptr_t*)(sect->addr + slide);
                    for (pointer_index = 0; pointer_index < pointer_count; ++pointer_index) {
                        // reserved1 is the section's base index into the indirect symbol table
                        const uint32_t indirectTableOffset = sect->reserved1;
                        if ((indirectTableOffset + pointer_index) < indirectSymbolTableCount) {
                            uint32_t symbolIndex = indirectSymbolTable[indirectTableOffset + pointer_index];
                            // if symbolIndex is INDIRECT_SYMBOL_LOCAL or INDIRECT_SYMBOL_LOCAL|INDIRECT_SYMBOL_ABS, then it will
                            // by definition be >= symbolTableCount.
                            if (symbolIndex < symbolTableCount) {
                                // found symbol for this lazy pointer, now lookup address
                                uint32_t stringTableOffset = symbolTable[symbolIndex].n_un.n_strx;
                                if (stringTableOffset < stringTableSize) {
                                    const char* symbolName = &stringTable[stringTableOffset];
                                    uint32_t i;
                                    for (i = 0; i < symbol_count; ++i) {
                                        if (strcmp(symbols[i], symbolName) == 0) {
                                            symbolPointers[pointer_index] = (uintptr_t)functions[i];
                                            break;
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        cmd = (const struct load_command*)(((char*)cmd)+cmd->cmdsize);
    }
}
490
// memmove that preserves GC write-barrier bookkeeping when GC is on.
void *objc_memmove_collectable(void *dst, const void *src, size_t size)
{
    if (!UseGC) return memmove(dst, src, size);
    return auto_zone_write_barrier_memmove(gc_zone, dst, src, size);
}
499
// CAS on an object slot, no memory barrier. GC path uses the zone's
// barrier-aware CAS; otherwise plain OSAtomic.
BOOL objc_atomicCompareAndSwapPtr(id predicate, id replacement, volatile id *objectLocation) {
    if (!UseGC)
        return OSAtomicCompareAndSwapPtr((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
    const BOOL issueMemoryBarrier = NO;
    return auto_zone_atomicCompareAndSwapPtr(gc_zone, (void *)predicate, (void *)replacement, (void * volatile *)objectLocation, issueMemoryBarrier);
}
507
// CAS on an object slot with a full memory barrier.
BOOL objc_atomicCompareAndSwapPtrBarrier(id predicate, id replacement, volatile id *objectLocation) {
    if (!UseGC)
        return OSAtomicCompareAndSwapPtrBarrier((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
    const BOOL issueMemoryBarrier = YES;
    return auto_zone_atomicCompareAndSwapPtr(gc_zone, (void *)predicate, (void *)replacement, (void * volatile *)objectLocation, issueMemoryBarrier);
}
515
// CAS on a global object slot (registered as a GC root), no barrier.
BOOL objc_atomicCompareAndSwapGlobal(id predicate, id replacement, volatile id *objectLocation) {
    if (!UseGC)
        return OSAtomicCompareAndSwapPtr((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
    const BOOL isGlobal = YES;
    const BOOL issueMemoryBarrier = NO;
    return auto_zone_atomicCompareAndSwap(gc_zone, (void *)predicate, (void *)replacement, (void * volatile *)objectLocation, isGlobal, issueMemoryBarrier);
}
524
// CAS on a global object slot (GC root) with a full memory barrier.
BOOL objc_atomicCompareAndSwapGlobalBarrier(id predicate, id replacement, volatile id *objectLocation) {
    if (!UseGC)
        return OSAtomicCompareAndSwapPtrBarrier((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
    const BOOL isGlobal = YES;
    const BOOL issueMemoryBarrier = YES;
    return auto_zone_atomicCompareAndSwap(gc_zone, (void *)predicate, (void *)replacement, (void * volatile *)objectLocation, isGlobal, issueMemoryBarrier);
}
533
// CAS on an instance-variable slot (not a root), no barrier.
BOOL objc_atomicCompareAndSwapInstanceVariable(id predicate, id replacement, volatile id *objectLocation) {
    if (!UseGC)
        return OSAtomicCompareAndSwapPtr((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
    const BOOL isGlobal = NO;
    const BOOL issueMemoryBarrier = NO;
    return auto_zone_atomicCompareAndSwap(gc_zone, (void *)predicate, (void *)replacement, (void * volatile *)objectLocation, isGlobal, issueMemoryBarrier);
}
542
// CAS on an instance-variable slot with a full memory barrier.
BOOL objc_atomicCompareAndSwapInstanceVariableBarrier(id predicate, id replacement, volatile id *objectLocation) {
    if (!UseGC)
        return OSAtomicCompareAndSwapPtrBarrier((void *)predicate, (void *)replacement, (void * volatile *)objectLocation);
    const BOOL isGlobal = NO;
    const BOOL issueMemoryBarrier = YES;
    return auto_zone_atomicCompareAndSwap(gc_zone, (void *)predicate, (void *)replacement, (void * volatile *)objectLocation, isGlobal, issueMemoryBarrier);
}
551
552
553 /***********************************************************************
554 * Weak ivar support
555 **********************************************************************/
556
// GC read of a weak reference: NULL slots short-circuit; otherwise ask the
// collector, which may return NULL if the referent has been collected.
PRIVATE_EXTERN id objc_read_weak_gc(id *location) {
    if (!*location) return NULL;
    return auto_read_weak_reference(gc_zone, (void **)location);
}
564
// Non-GC weak read: a plain load.
PRIVATE_EXTERN id objc_read_weak_non_gc(id *location) {
    return *location;
}
568
// Public weak read: defer to the collector only when GC is on and the
// slot is non-NULL; otherwise return the raw value.
id objc_read_weak(id *location) {
    id result = *location;
    if (result && UseGC) {
        result = auto_read_weak_reference(gc_zone, (void **)location);
    }
    return result;
}
576
// GC weak store: register (or clear) the weak reference with the collector.
PRIVATE_EXTERN id objc_assign_weak_gc(id value, id *location) {
    auto_assign_weak_reference(gc_zone, value, (const void **)location, NULL);
    return value;
}
581
// Non-GC weak store: a plain store.
PRIVATE_EXTERN id objc_assign_weak_non_gc(id value, id *location) {
    return (*location = value);
}
585
// Public weak store: dispatch on the GC flag.
id objc_assign_weak(id value, id *location) {
    if (!UseGC) {
        *location = value;
        return value;
    }
    auto_assign_weak_reference(gc_zone, value, (const void **)location, NULL);
    return value;
}
595
// After copying an object, re-register its weak ivars at their new
// addresses. Decodes the class's weak-ivar layout: each layout byte packs
// a count of words to skip (high nibble) and a count of consecutive weak
// words (low nibble); a zero byte terminates.
PRIVATE_EXTERN void gc_fixup_weakreferences(id newObject, id oldObject) {
    // fix up weak references if any.
    const unsigned char *weakLayout = (const unsigned char *)class_getWeakIvarLayout(_object_getClass(newObject));
    if (weakLayout) {
        void **newPtr = (void **)newObject, **oldPtr = (void **)oldObject;
        unsigned char byte;
        while ((byte = *weakLayout++)) {
            unsigned skips = (byte >> 4);
            unsigned weaks = (byte & 0x0F);
            newPtr += skips, oldPtr += skips;
            while (weaks--) {
                // clear the destination first, then transfer the registration:
                // read through the old slot and weak-assign into the new one
                *newPtr = NULL;
                auto_assign_weak_reference(gc_zone, auto_read_weak_reference(gc_zone, oldPtr), (const void **)newPtr, NULL);
                ++newPtr, ++oldPtr;
            }
        }
    }
}
614
615 /***********************************************************************
616 * Testing tools
617 * Used to isolate resurrection of garbage objects during finalization.
618 **********************************************************************/
// Testing hook: YES when ptr is a GC object that has already been finalized.
BOOL objc_is_finalized(void *ptr) {
    if (ptr == NULL || !UseGC) return NO;
    return auto_zone_is_finalized(gc_zone, ptr);
}
625
626
627 /***********************************************************************
628 * Stack clearing.
629 * Used by top-level thread loops to reduce false pointers from the stack.
630 **********************************************************************/
// Scrub dead portions of the calling thread's stack to reduce false roots.
// NOTE(review): the options argument is currently ignored — 0 is always
// passed to auto_zone_clear_stack.
void objc_clear_stack(unsigned long options) {
    if (!UseGC) return;
    auto_zone_clear_stack(gc_zone, 0);
}
635
636
637 /***********************************************************************
638 * Finalization support
639 **********************************************************************/
640
// Cached IMP of NSObject's -finalize; invoked by _NSResurrectedObject_finalize below.
static IMP _NSObject_finalize = NULL;

// Finalizer crash debugging
static void *finalizing_object;  // object whose -finalize is currently running, else NULL
645
// finalize a single object without fuss
// When there are no main-thread-only classes this is used directly
// Otherwise, it is used indirectly by smarter code that knows main-thread-affinity requirements
static void finalizeOneObject(void *obj, void *ignored) {
    id object = (id)obj;
    finalizing_object = obj;  // recorded so a crash during -finalize is attributable

    Class cls = object_getClass(obj);
    CRSetCrashLogMessage2(class_getName(cls));  // class name shows up in crash reports

    /// call -finalize method.
    objc_msgSend(object, @selector(finalize));

    // Call C++ destructors.
    // This would be objc_destructInstance() but for performance.
    if (_class_hasCxxStructors(cls)) {
        object_cxxDestruct(object);
    }

    finalizing_object = NULL;
    CRSetCrashLogMessage2(NULL);
}
668
// finalize object only if it is a main-thread-only object.
// Called only from the main thread.
static void finalizeOneMainThreadOnlyObject(void *obj, void *arg) {
    id object = (id)obj;
    Class cls = _object_getClass(object);
    if (cls == NULL) {
        _objc_fatal("object with NULL ISA passed to finalizeOneMainThreadOnlyObject: %p\n", obj);
    }
    if (!_class_shouldFinalizeOnMainThread(cls)) return;  // not ours to handle here
    finalizeOneObject(obj, NULL);
}
681
// finalize one object only if it is not a main-thread-only object
// called from any other thread than the main thread
// Important: if a main-thread-only object is passed, return that fact in the needsMain argument
static void finalizeOneAnywhereObject(void *obj, void *needsMain) {
    id object = (id)obj;
    Class cls = _object_getClass(object);
    bool *needsMainThreadWork = needsMain;
    if (cls == NULL) {
        _objc_fatal("object with NULL ISA passed to finalizeOneAnywhereObject: %p\n", obj);
    }
    if (_class_shouldFinalizeOnMainThread(cls)) {
        // defer: flag that the main thread has work to do
        *needsMainThreadWork = true;
        return;
    }
    finalizeOneObject(obj, NULL);
}
699
700
// Utility workhorse.
// Set up the expensive @try block and ask the collector to hand the next object to
// our finalizeAnObject function.
// Track and return a boolean that records whether or not any main thread work is necessary.
// (When we know that there are no main thread only objects then the boolean isn't even computed)
static bool batchFinalize(auto_zone_t *zone,
                          auto_zone_foreach_object_t foreach,
                          auto_zone_cursor_t cursor,
                          size_t cursor_size,
                          void (*finalizeAnObject)(void *, void*))
{
#if !defined(NDEBUG) && !__OBJC2__
    // debug: don't call try/catch before exception handlers are installed
    objc_exception_functions_t table = {0};
    objc_exception_get_functions(&table);
    assert(table.throw_exc);
#endif

    bool needsMainThreadWork = false;
    // loop so that a throwing -finalize only skips that object: the cursor
    // retains its position and iteration resumes with the next object
    for (;;) {
        @try {
            foreach(cursor, finalizeAnObject, &needsMainThreadWork);
            // non-exceptional return means finalization is complete.
            break;
        }
        @catch (id exception) {
            // whoops, note exception, then restart at cursor's position
            _objc_inform("GC: -finalize resulted in an exception (%p) being thrown, break on objc_exception_during_finalize_error to debug\n\t%s", exception, (const char*)[[exception description] UTF8String]);
            objc_exception_during_finalize_error();
        }
        @catch (...) {
            // whoops, note exception, then restart at cursor's position
            _objc_inform("GC: -finalize resulted in an exception being thrown, break on objc_exception_during_finalize_error to debug");
            objc_exception_during_finalize_error();
        }
    }
    return needsMainThreadWork;
}
739
// Called on main thread-only.
// Pick up work from global queue.
// called parasitically by anyone requesting a collection
// called explicitly when there is known to be main thread only finalization work
// In both cases we are on the main thread
// Guard against recursion by something called from a finalizer
static void batchFinalizeOnMainThread() {
    pthread_mutex_lock(&MainThreadWorkQ.mutex);
    if (!MainThreadWorkQ.head || MainThreadWorkQ.head->started) {
        // No work or we're already here
        pthread_mutex_unlock(&MainThreadWorkQ.mutex);
        return;
    }
    while (MainThreadWorkQ.head) {
        BatchFinalizeBlock_t *bfb = MainThreadWorkQ.head;
        bfb->started = YES;
        // drop the lock while finalizing so producers can enqueue more work
        pthread_mutex_unlock(&MainThreadWorkQ.mutex);

        batchFinalize(gc_zone, bfb->foreach, bfb->cursor, bfb->cursor_size, finalizeOneMainThreadOnlyObject);
        // signal the collector thread(s) that finalization has finished.
        pthread_mutex_lock(&MainThreadWorkQ.mutex);
        bfb->finished = YES;
        pthread_cond_broadcast(&MainThreadWorkQ.condition);
        MainThreadWorkQ.head = bfb->next;
    }
    MainThreadWorkQ.tail = NULL;
    pthread_mutex_unlock(&MainThreadWorkQ.mutex);
}
768
769
// Knowing that we possibly have main thread only work to do, first process everything
// that is not main-thread-only. If we discover main thread only work, queue a work block
// to the main thread that will do just the main thread only work. Wait for it.
// Called from a non main thread.
static void batchFinalizeOnTwoThreads(auto_zone_t *zone,
                                      auto_zone_foreach_object_t foreach,
                                      auto_zone_cursor_t cursor,
                                      size_t cursor_size)
{
    // First, lets get rid of everything we can on this thread, then ask main thread to help if needed
    // (work on a copy of the cursor so the original can be replayed on the main thread)
    char cursor_copy[cursor_size];
    memcpy(cursor_copy, cursor, cursor_size);
    bool needsMainThreadFinalization = batchFinalize(zone, foreach, (auto_zone_cursor_t)cursor_copy, cursor_size, finalizeOneAnywhereObject);

    if (! needsMainThreadFinalization)
        return;     // no help needed

    // set up the control block. Either our ping of main thread with _callOnMainThread will get to it, or
    // an objc_collect(if_needed) will get to it. Either way, this block will be processed on the main thread.
    // NOTE: bfb lives on this stack frame; safe because we block below until finished is set.
    BatchFinalizeBlock_t bfb;
    bfb.foreach = foreach;
    bfb.cursor = cursor;
    bfb.cursor_size = cursor_size;
    bfb.started = NO;
    bfb.finished = NO;
    bfb.next = NULL;
    pthread_mutex_lock(&MainThreadWorkQ.mutex);
    if (MainThreadWorkQ.tail) {

        // link to end so that ordering of finalization is preserved.
        MainThreadWorkQ.tail->next = &bfb;
        MainThreadWorkQ.tail = &bfb;
    }
    else {
        MainThreadWorkQ.head = &bfb;
        MainThreadWorkQ.tail = &bfb;
    }
    pthread_mutex_unlock(&MainThreadWorkQ.mutex);

    //printf("----->asking main thread to finalize\n");
    dispatch_async(dispatch_get_main_queue(), ^{ batchFinalizeOnMainThread(); });

    // wait for the main thread to finish finalizing instances of classes marked CLS_FINALIZE_ON_MAIN_THREAD.
    pthread_mutex_lock(&MainThreadWorkQ.mutex);
    while (!bfb.finished) {
        // the main thread might be blocked waiting for a synchronous collection to complete, so wake it here
        pthread_cond_signal(&MainThreadWorkQ.condition);
        pthread_cond_wait(&MainThreadWorkQ.condition, &MainThreadWorkQ.mutex);
    }
    pthread_mutex_unlock(&MainThreadWorkQ.mutex);
    //printf("<------ main thread finalize done\n");

}
823
824
825
// collector calls this with garbage ready
// thread collectors, too, so this needs to be thread-safe
static void BatchInvalidate(auto_zone_t *zone,
                            auto_zone_foreach_object_t foreach,
                            auto_zone_cursor_t cursor,
                            size_t cursor_size)
{
    // We can finalize everything right here when we're already on the main
    // thread, or when no main-thread-only classes were ever registered.
    BOOL finalizeEverythingHere = pthread_main_np() || !WantsMainThreadFinalization;
    if (!finalizeEverythingHere) {
        // We're on the dedicated thread. Collect some on main thread, the rest here.
        batchFinalizeOnTwoThreads(zone, foreach, cursor, cursor_size);
        return;
    }
    batchFinalize(zone, foreach, cursor, cursor_size, finalizeOneObject);
}
844
845
846 /*
847 * Zombie support
848 * Collector calls into this system when it finds resurrected objects.
849 * This keeps them pitifully alive and leaked, even if they reference garbage.
850 */
851
852 // idea: keep a side table mapping resurrected object pointers to their original Class, so we don't
853 // need to smash anything. alternatively, could use associative references to track against a secondary
854 // object with information about the resurrection, such as a stack crawl, etc.
855
static Class _NSResurrectedObjectClass;             // runtime-built class that zombies are isa-swizzled to
static NXMapTable *_NSResurrectedObjectMap = NULL;  // resurrected object -> its original Class
static pthread_mutex_t _NSResurrectedObjectLock = PTHREAD_MUTEX_INITIALIZER;  // guards the map
859
// Look up the class this zombie had before it was resurrected.
static Class resurrectedObjectOriginalClass(id object) {
    pthread_mutex_lock(&_NSResurrectedObjectLock);
    Class originalClass = (Class) NXMapGet(_NSResurrectedObjectMap, object);
    pthread_mutex_unlock(&_NSResurrectedObjectLock);
    return originalClass;
}
867
// Catch-all IMP installed for class methods sent to a zombie: returns self.
static id _NSResurrectedObject_classMethod(id self, SEL selector) { return self; }
869
// Catch-all IMP for instance methods sent to a zombie: logs the message
// (with the object's original class name) and returns self.
static id _NSResurrectedObject_instanceMethod(id self, SEL name) {
    _objc_inform("**resurrected** object %p of class %s being sent message '%s'\n", self, class_getName(resurrectedObjectOriginalClass(self)), sel_getName(name));
    return self;
}
874
// -finalize for zombies: remove the map entry, log if one existed, then
// chain to the cached NSObject -finalize implementation.
static void _NSResurrectedObject_finalize(id self, SEL _cmd) {
    Class originalClass;
    pthread_mutex_lock(&_NSResurrectedObjectLock);
    originalClass = (Class) NXMapRemove(_NSResurrectedObjectMap, self);
    pthread_mutex_unlock(&_NSResurrectedObjectLock);
    if (originalClass) _objc_inform("**resurrected** object %p of class %s being finalized\n", self, class_getName(originalClass));
    _NSObject_finalize(self, _cmd);
}
883
// +resolveInstanceMethod: installs the catch-all instance IMP for any selector.
static BOOL _NSResurrectedObject_resolveInstanceMethod(id self, SEL _cmd, SEL name) {
    class_addMethod((Class)self, name, (IMP)_NSResurrectedObject_instanceMethod, "@@:");
    return YES;
}
888
// +resolveClassMethod: for _NSResurrectedObject. Installs the no-op class
// method stand-in on the metaclass for whatever selector was sent.
static BOOL _NSResurrectedObject_resolveClassMethod(id self, SEL _cmd, SEL name) {
    class_addMethod(_object_getClass(self), name, (IMP)_NSResurrectedObject_classMethod, "@@:");
    return YES;
}
893
// One-time setup of the zombie machinery: creates the side map and builds the
// _NSResurrectedObject class at runtime (NSObject subclass with a custom
// -finalize plus lazy method resolution on its metaclass). Called from
// objc_collect_init() once Foundation is loaded.
static void _NSResurrectedObject_initialize() {
    // Map of resurrected object pointer -> original Class.
    _NSResurrectedObjectMap = NXCreateMapTable(NXPtrValueMapPrototype, 128);
    _NSResurrectedObjectClass = objc_allocateClassPair(objc_getClass("NSObject"), "_NSResurrectedObject", 0);
    class_addMethod(_NSResurrectedObjectClass, @selector(finalize), (IMP)_NSResurrectedObject_finalize, "v@:");
    Class metaClass = _object_getClass(_NSResurrectedObjectClass);
    // Resolver methods live on the metaclass; all methods must be added
    // before objc_registerClassPair() makes the class usable.
    class_addMethod(metaClass, @selector(resolveInstanceMethod:), (IMP)_NSResurrectedObject_resolveInstanceMethod, "c@::");
    class_addMethod(metaClass, @selector(resolveClassMethod:), (IMP)_NSResurrectedObject_resolveClassMethod, "c@::");
    objc_registerClassPair(_NSResurrectedObjectClass);
}
903
// auto_zone resurrection callback: invoked when the collector finds a garbage
// object that has been made reachable again. The object is kept pitifully
// alive (leaked); its original class is recorded and its isa is swapped to
// _NSResurrectedObjectClass so that further messages are logged.
static void resurrectZombie(auto_zone_t *zone, void *ptr) {
    id object = (id) ptr;
    Class cls = _object_getClass(object);
    if (cls == _NSResurrectedObjectClass) return;   // already a zombie
    // Remember the original class for this instance, then smash the isa.
    pthread_mutex_lock(&_NSResurrectedObjectLock);
    NXMapInsert(_NSResurrectedObjectMap, ptr, cls);
    pthread_mutex_unlock(&_NSResurrectedObjectLock);
    object_setClass(object, _NSResurrectedObjectClass);
}
915
916 /***********************************************************************
917 * Pretty printing support
918 * For development purposes.
919 **********************************************************************/
920
921
922 static char *name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset, int withRetainCount);
923
// auto_zone pretty-printing callback: malloc'd description of the block at
// base (+offset). Thin wrapper over name_for_address() without retain count.
static char* objc_name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset)
{
    return name_for_address(zone, base, offset, false);
}
928
// auto_zone callback: class name for an object, or "" when its isa does not
// point at a registered class (e.g. raced against TLC recycling).
static const char* objc_name_for_object(auto_zone_t *zone, void *object) {
    Class cls = *(Class *)object;
    if (objc_isRegisteredClass(cls)) return class_getName(cls);
    return "";
}
934
935 /* Compaction support */
936
// Permanently turn off heap compaction: clear the global flag and tell the
// collector. Idempotent — does nothing if compaction is already disabled.
PRIVATE_EXTERN void objc_disableCompaction() {
    if (!UseCompaction) return;
    UseCompaction = NO;
    auto_zone_disable_compaction(gc_zone);
}
943
944 /***********************************************************************
945 * Collection support
946 **********************************************************************/
947
948 static BOOL objc_isRegisteredClass(Class candidate);
949
// auto_zone callback: strong-ivar layout for the object at address, or NULL
// if its isa is not a registered class. The volatile cast forces a single
// fresh read of the isa so the registration check and the layout lookup use
// the same value (guards against racing TLC recycling; see the "Track
// classes" comment later in this file).
static const unsigned char *objc_layout_for_address(auto_zone_t *zone, void *address) {
    id object = (id)address;
    Class cls = (volatile Class)_object_getClass(object);
    return objc_isRegisteredClass(cls) ? _object_getIvarLayout(cls, object) : NULL;
}
955
// auto_zone callback: weak-ivar layout for the object at address, or NULL if
// its isa is not a registered class. Same volatile single-read discipline as
// objc_layout_for_address above.
static const unsigned char *objc_weak_layout_for_address(auto_zone_t *zone, void *address) {
    id object = (id)address;
    Class cls = (volatile Class)_object_getClass(object);
    return objc_isRegisteredClass(cls) ? class_getWeakIvarLayout(cls) : NULL;
}
961
// Tell the collector to scan a newly mapped data segment [base, base+size).
PRIVATE_EXTERN void gc_register_datasegment(uintptr_t base, size_t size) {
    auto_zone_register_datasegment(gc_zone, (void*)base, size);
}
965
// Tell the collector to stop scanning an unmapped data segment.
PRIVATE_EXTERN void gc_unregister_datasegment(uintptr_t base, size_t size) {
    auto_zone_unregister_datasegment(gc_zone, (void*)base, size);
}
969
970 #define countof(array) (sizeof(array) / sizeof(array[0]))
971
972 // defined in objc-externalref.m.
973 extern objc_xref_t _object_addExternalReference_gc(id obj, objc_xref_t type);
974 extern objc_xref_t _object_addExternalReference_rr(id obj, objc_xref_t type);
975 extern id _object_readExternalReference_gc(objc_xref_t ref);
976 extern id _object_readExternalReference_rr(objc_xref_t ref);
977 extern void _object_removeExternalReference_gc(objc_xref_t ref);
978 extern void _object_removeExternalReference_rr(objc_xref_t ref);
979
// Rewrite the write-barrier stubs in a newly loaded image so each stub jumps
// directly to the GC or non-GC implementation, chosen once at load time by
// the global UseGC flag. The two function tables are parallel to `symbols`;
// entries must stay in exactly the same order (checked by the asserts).
PRIVATE_EXTERN void gc_fixup_barrier_stubs(const struct dyld_image_info *info) {
    static const char *symbols[] = {
        "_objc_assign_strongCast", "_objc_assign_ivar",
        "_objc_assign_global", "_objc_assign_threadlocal",
        "_objc_read_weak", "_objc_assign_weak",
        "_objc_getProperty", "_objc_setProperty",
        "_objc_getAssociatedObject", "_objc_setAssociatedObject",
        "__object_addExternalReference", "__object_readExternalReference", "__object_removeExternalReference"
    };
    if (UseGC) {
        // resolve barrier symbols using GC functions.
        static void *gc_functions[] = {
            &objc_assign_strongCast_gc, &objc_assign_ivar_gc,
            &objc_assign_global_gc, &objc_assign_threadlocal_gc,
            &objc_read_weak_gc, &objc_assign_weak_gc,
            &objc_getProperty_gc, &objc_setProperty_gc,
            &objc_getAssociatedObject_gc, &objc_setAssociatedObject_gc,
            &_object_addExternalReference_gc, &_object_readExternalReference_gc, &_object_removeExternalReference_gc
        };
        assert(countof(symbols) == countof(gc_functions));
        _objc_update_stubs_in_mach_header(info->imageLoadAddress, countof(symbols), symbols, gc_functions);
    } else {
        // resolve barrier symbols using non-GC functions.
        static void *nongc_functions[] = {
            &objc_assign_strongCast_non_gc, &objc_assign_ivar_non_gc,
            &objc_assign_global_non_gc, &objc_assign_threadlocal_non_gc,
            &objc_read_weak_non_gc, &objc_assign_weak_non_gc,
            &objc_getProperty_non_gc, &objc_setProperty_non_gc,
            &objc_getAssociatedObject_non_gc, &objc_setAssociatedObject_non_gc,
            &_object_addExternalReference_rr, &_object_readExternalReference_rr, &_object_removeExternalReference_rr
        };
        assert(countof(symbols) == countof(nongc_functions));
        _objc_update_stubs_in_mach_header(info->imageLoadAddress, countof(symbols), symbols, nongc_functions);
    }
}
1015
1016 /***********************************************************************
1017 * Initialization
1018 **********************************************************************/
1019
// auto_zone callback: the heap is about to grow. Kick off a coalesced ratio
// collection unless a collection is already in progress.
static void objc_will_grow(auto_zone_t *zone, auto_heap_growth_info_t info) {
    if (!auto_zone_is_collecting(gc_zone)) {
        auto_zone_collect(gc_zone, AUTO_ZONE_COLLECT_COALESCE|AUTO_ZONE_COLLECT_RATIO_COLLECTION);
    }
}
1028
1029
// Create and configure the collector's auto_zone. Installs this file's
// callbacks (finalization batching, heap-growth, zombie resurrection, ivar
// layouts, pretty-printing) into the zone's collection control block.
// NOTE(review): the didOnce guard is not lock-protected — presumably callers
// serialize initialization; verify against gc_init's caller.
static auto_zone_t *gc_zone_init(BOOL wantsCompaction)
{
    auto_zone_t *result;
    static int didOnce = 0;
    if (!didOnce) {
        didOnce = 1;

        // initialize the batch finalization queue
        MainThreadWorkQ.head = NULL;
        MainThreadWorkQ.tail = NULL;
        pthread_mutex_init(&MainThreadWorkQ.mutex, NULL);
        pthread_cond_init(&MainThreadWorkQ.condition, NULL);
    }

    result = auto_zone_create("auto_zone");

    if (!wantsCompaction) auto_zone_disable_compaction(result);

    auto_collection_control_t *control = auto_collection_parameters(result);

    // set up the magic control parameters
    control->batch_invalidate = BatchInvalidate;
    control->will_grow = objc_will_grow;
    control->resurrect = resurrectZombie;
    control->layout_for_address = objc_layout_for_address;
    control->weak_layout_for_address = objc_weak_layout_for_address;
    control->name_for_address = objc_name_for_address;

    // name_for_object is a later addition to the control struct; only set it
    // if the zone's struct version is new enough to contain the field.
    if (control->version >= sizeof(auto_collection_control_t)) {
        control->name_for_object = objc_name_for_object;
    }

    return result;
}
1064
1065
1066 /* should be defined in /usr/local/include/libdispatch_private.h. */
1067 extern void (*dispatch_begin_thread_4GC)(void);
1068 extern void (*dispatch_end_thread_4GC)(void);
1069
// Reap all thread-local GC blocks for the calling thread. Installed as
// libdispatch's end-of-thread hook (dispatch_end_thread_4GC). No-op when GC
// is off.
static void objc_reapThreadLocalBlocks()
{
    if (UseGC) auto_zone_reap_all_local_blocks(gc_zone);
}
1074
// Public API: register the calling thread with the collector so its stack
// and registers are scanned. No-op when GC is off.
void objc_registerThreadWithCollector()
{
    if (UseGC) auto_zone_register_thread(gc_zone);
}
1079
// Public API: unregister the calling thread from the collector. No-op when
// GC is off.
void objc_unregisterThreadWithCollector()
{
    if (UseGC) auto_zone_unregister_thread(gc_zone);
}
1084
// Public API: debugging aid that traps if the calling thread is not
// registered with the collector. No-op when GC is off.
void objc_assertRegisteredThreadWithCollector()
{
    if (UseGC) auto_zone_assert_thread_registered(gc_zone);
}
1089
// Always called by _objcInit, even if GC is off.
// Records the process-wide GC/compaction mode, and when GC is on: creates
// the auto_zone, hooks libdispatch thread lifecycle, primes the NSObject
// finalize IMP (real one arrives later via objc_collect_init), sets up the
// registered-class side table, and points the Blocks runtime at collectable
// memory. Always annotates crash reports with the GC state.
PRIVATE_EXTERN void gc_init(BOOL wantsGC, BOOL wantsCompaction)
{
    UseGC = wantsGC;
    UseCompaction = wantsCompaction;

    if (PrintGC) {
        _objc_inform("GC: is %s", wantsGC ? "ON" : "OFF");
        _objc_inform("Compaction: is %s", wantsCompaction ? "ON" : "OFF");
    }

    if (UseGC) {
        // Set up the GC zone
        gc_zone = gc_zone_init(wantsCompaction);

        // tell libdispatch to register its threads with the GC.
        dispatch_begin_thread_4GC = objc_registerThreadWithCollector;
        dispatch_end_thread_4GC = objc_reapThreadLocalBlocks;

        // no NSObject until Foundation calls objc_collect_init()
        _NSObject_finalize = &_objc_msgForward_internal;

        // set up the registered classes list
        registeredClassTableInit();

        // tell Blocks to use collectable memory. CF will cook up the classes separately.
        gc_block_init();
    }

    // Add GC state to crash log reports
    _objc_inform_on_crash("garbage collection is %s",
                          wantsGC ? "ON" : "OFF");
}
1123
1124
1125
// Called by Foundation to install auto's interruption callback.
// Second-stage GC setup, run once Foundation is loaded: caches the real
// -[NSObject finalize] IMP (fatal if Foundation never implemented it) and
// creates the _NSResurrectedObject zombie class. Returns the GC zone as a
// malloc_zone_t* for Foundation's use. The callback parameter is unused here.
malloc_zone_t *objc_collect_init(int (*callback)(void))
{
    // Find NSObject's finalize method now that Foundation is loaded.
    // fixme only look for the base implementation, not a category's
    _NSObject_finalize = class_getMethodImplementation(objc_getClass("NSObject"), @selector(finalize));
    if (_NSObject_finalize == &_objc_msgForward /* not _internal! */) {
        _objc_fatal("GC: -[NSObject finalize] unimplemented!");
    }

    // create the _NSResurrectedObject class used to track resurrections.
    _NSResurrectedObject_initialize();

    return (malloc_zone_t *)gc_zone;
}
1141
1142 /*
1143 * Support routines for the Block implementation
1144 */
1145
1146
// Allocation hook handed to the Blocks runtime (_Block_use_GC). The Block
// runtime sometimes needs a Block that is an Object — namely when it needs a
// finalizer, which for now means C++ destructors in the helper function —
// hence the isObject flag. Under GC a -copy should produce a refcount-0
// block, hence the isOne flag (true = allocate with refcount 1).
static void *block_gc_alloc5(const unsigned long size, const bool isOne, const bool isObject) {
    auto_memory_type_t type = AUTO_MEMORY_SCANNED;
    if (isObject) type = (AUTO_OBJECT|AUTO_MEMORY_SCANNED);
    return auto_zone_allocate_object(gc_zone, size, type, isOne, false);
}
1155
// Refcount hook handed to the Blocks runtime. The Blocks runtime tracks all
// counts above 1 itself and only calls up to the collector for the 0->1
// transition (retain) and the 1->0 transition (release).
static void block_gc_setHasRefcount(const void *block, const bool hasRefcount) {
    if (hasRefcount)
        auto_zone_retain(gc_zone, (void *)block);
    else
        auto_zone_release(gc_zone, (void *)block);
}
1164
// Memmove hook handed to the Blocks runtime: copies with the GC write
// barrier so the collector sees any pointer stores.
static void block_gc_memmove(void *dst, void *src, unsigned long size) {
    auto_zone_write_barrier_memmove(gc_zone, dst, src, (size_t)size);
}
1168
// Point the Blocks runtime at collectable memory: install the allocation,
// refcount, strong-assign, weak-assign, and barrier-memmove hooks above.
// The casts adapt this file's barrier signatures to _Block_use_GC's
// function-pointer types.
static void gc_block_init(void) {
    _Block_use_GC(
        block_gc_alloc5,
        block_gc_setHasRefcount,
        (void (*)(void *, void **))objc_assign_strongCast_gc,
        (void (*)(const void *, void *))objc_assign_weak,
        block_gc_memmove
    );
}
1178
1179
1180 /***********************************************************************
1181 * Track classes.
1182 * In addition to the global class hashtable (set) indexed by name, we
1183 * also keep one based purely by pointer when running under Garbage Collection.
1184 * This allows the background collector to race against objects recycled from TLC.
1185 * Specifically, the background collector can read the admin byte and see that
1186 * a thread local object is an object, get scheduled out, and the TLC recovers it,
1187 * linking it into the cache, then the background collector reads the isa field and
1188 * finds linkage info. By qualifying all isa fields read we avoid this.
1189 **********************************************************************/
1190
1191 // This is a self-contained hash table of all classes. The first two elements contain the (size-1) and count.
1192 static volatile Class *AllClasses = nil;
1193
1194 #define SHIFT 3
1195 #define INITIALSIZE 512
1196 #define REMOVED -1
1197
1198 // Allocate the side table.
// Allocate the side table of all registered classes (see the "Track classes"
// comment above). Layout: slot 0 = capacity-1 (mask), slot 1 = count,
// slots 2+ = class pointers. Allocated from the GC zone as unscanned memory
// so old tables can be reclaimed after growth (objc_addRegisteredClass
// releases the old table once a grown copy is published).
static void registeredClassTableInit() {
    assert(UseGC);
    // allocate a collectable (refcount 0) zeroed hunk of unscanned memory
    uintptr_t *table = (uintptr_t *)auto_zone_allocate_object(gc_zone, INITIALSIZE*sizeof(void *), AUTO_MEMORY_UNSCANNED, true, true);
    // set initial capacity (as mask)
    table[0] = INITIALSIZE - 1;
    // set initial count
    table[1] = 0;
    // Compaction: we allocate it refcount 1 and then decr when done.
    AllClasses = (Class *)table;
}
1210
1211 // Verify that a particular pointer is to a class.
1212 // Safe from any thread anytime
// Verify that a particular pointer is to a class.
// Safe from any thread anytime: lock-free linear probe over a snapshot of
// the AllClasses table. Writers keep the table under 50% full and publish
// grown tables wholesale (see objc_addRegisteredClass), so a probe always
// terminates at a 0 slot. REMOVED tombstones simply fail to match and the
// probe continues past them.
static BOOL objc_isRegisteredClass(Class candidate) {
    assert(UseGC);
    // nil is never a valid ISA.
    if (candidate == nil) return NO;
    // We don't care about a race with another thread adding a class to which we randomly might have a pointer
    // Get local copy of classes so that we're immune from updates.
    // We keep the size of the list as the first element so there is no race as the list & size get updated.
    uintptr_t *allClasses = (uintptr_t *)AllClasses;
    // Slot 0 is always the size of the list in log 2 masked terms (e.g. size - 1) where size is always power of 2
    // Slot 1 is count
    uintptr_t slot = (((uintptr_t)candidate) >> SHIFT) & allClasses[0];
    // avoid slot 0 and 1
    if (slot < 2) slot = 2;
    for(;;) {
        long int slotValue = allClasses[slot];
        if (slotValue == (long int)candidate) {
            return YES;
        }
        if (slotValue == 0) {
            return NO;
        }
        ++slot;
        if (slot > allClasses[0])
            slot = 2; // skip size, count
    }
}
1239
1240 // Utility used when growing
1241 // Assumes lock held
// Utility used when growing
// Assumes lock held
// Insert candidate into a fresh table (no REMOVED tombstones to consider)
// using the same hash (pointer >> SHIFT, masked) and wrap-around linear
// probe as the lookup path; bumps the count in slot 1.
static void addClassHelper(uintptr_t *table, uintptr_t candidate) {
    uintptr_t slot = (((long int)candidate) >> SHIFT) & table[0];
    if (slot < 2) slot = 2;
    for(;;) {
        uintptr_t slotValue = table[slot];
        if (slotValue == 0) {
            table[slot] = candidate;
            ++table[1];
            return;
        }
        ++slot;
        if (slot > table[0])
            slot = 2; // skip size, count
    }
}
1257
1258 // lock held by callers
// lock held by callers
// Add candidate to the registered-class table. Reuses a REMOVED tombstone if
// the probe hits one first; otherwise fills the first empty slot. When an
// insert pushes the table past 50% utilization, a double-size table is
// built, rehashed (tombstones dropped), published via AllClasses, and the
// old table is released so the collector reclaims it once concurrent
// lock-free readers are done with it.
PRIVATE_EXTERN
void objc_addRegisteredClass(Class candidate) {
    if (!UseGC) return;
    uintptr_t *table = (uintptr_t *)AllClasses;
    // Slot 0 is always the size of the list in log 2 masked terms (e.g. size - 1) where size is always power of 2
    // Slot 1 is count - always non-zero
    uintptr_t slot = (((long int)candidate) >> SHIFT) & table[0];
    if (slot < 2) slot = 2;
    for(;;) {
        uintptr_t slotValue = table[slot];
        // A class must not be registered twice.
        assert(slotValue != (uintptr_t)candidate);
        if (slotValue == REMOVED) {
            // Reuse a tombstone; count was never decremented for removals.
            table[slot] = (long)candidate;
            return;
        }
        else if (slotValue == 0) {
            table[slot] = (long)candidate;
            if (2*++table[1] > table[0]) { // add to count; check if we cross 50% utilization
                // grow
                uintptr_t oldSize = table[0]+1;
                uintptr_t *newTable = (uintptr_t *)auto_zone_allocate_object(gc_zone, oldSize*2*sizeof(void *), AUTO_MEMORY_UNSCANNED, true, true);
                uintptr_t i;
                newTable[0] = 2*oldSize - 1;
                newTable[1] = 0;
                for (i = 2; i < oldSize; ++i) {
                    if (table[i] && table[i] != REMOVED)
                        addClassHelper(newTable, table[i]);
                }
                AllClasses = (Class *)newTable;
                // let the old table be collected when other threads are no longer reading it.
                auto_zone_release(gc_zone, (void *)table);
            }
            return;
        }
        ++slot;
        if (slot > table[0])
            slot = 2; // skip size, count
    }
}
1298
1299 // lock held by callers
1300 PRIVATE_EXTERN
1301 void objc_removeRegisteredClass(Class candidate) {
1302 if (!UseGC) return;
1303 uintptr_t *table = (uintptr_t *)AllClasses;
1304 // Slot 0 is always the size of the list in log 2 masked terms (e.g. size - 1) where size is always power of 2
1305 // Slot 1 is count - always non-zero
1306 uintptr_t slot = (((uintptr_t)candidate) >> SHIFT) & table[0];
1307 if (slot < 2) slot = 2;
1308 for(;;) {
1309 uintptr_t slotValue = table[slot];
1310 if (slotValue == (uintptr_t)candidate) {
1311 table[slot] = REMOVED; // if next slot == 0 we could set to 0 here and decr count
1312 return;
1313 }
1314 assert(slotValue != 0);
1315 ++slot;
1316 if (slot > table[0])
1317 slot = 2; // skip size, count
1318 }
1319 }
1320
1321
1322 /***********************************************************************
1323 * Debugging - support for smart printouts when errors occur
1324 **********************************************************************/
1325
1326
// Lazily create a private malloc zone used for debug pretty-printing
// allocations (see name_for_address), keeping them out of the GC heap.
// NOTE(review): the lazy init is unsynchronized; a race could create two
// zones and leak one — harmless for debug output, but worth confirming
// callers are serialized.
static malloc_zone_t *objc_debug_zone(void)
{
    static malloc_zone_t *z = NULL;
    if (!z) {
        z = malloc_create_zone(4096, 0);
        malloc_set_zone_name(z, "objc-auto debug");
    }
    return z;
}
1336
// Write the ASCII representation of value (in the given base, lowercase
// digits for base > 10) at head, most significant digit first, via
// recursion. Does NOT NUL-terminate; returns the position one past the last
// digit written. Caller must guarantee sufficient buffer space.
static char *_malloc_append_unsigned(uintptr_t value, unsigned base, char *head) {
    if (value == 0) {
        *head = '0';
        return head + 1;
    }
    // Emit higher-order digits first.
    if (value >= base) {
        head = _malloc_append_unsigned(value / base, base, head);
    }
    uintptr_t digit = value % base;
    *head = (digit < 10) ? (char)('0' + digit) : (char)('a' + digit - 10);
    return head + 1;
}
1347
// Append the decimal representation of value to the NUL-terminated string in
// str (total buffer capacity bufSize). Silently does nothing unless at least
// 30 bytes of headroom remain — enough for any 64-bit value plus terminator.
static void strlcati(char *str, uintptr_t value, size_t bufSize)
{
    size_t len = strlen(str);
    // Guard len >= bufSize first: the original `bufSize - strlen(str)` is
    // unsigned and underflows to a huge value when the string already
    // overfills the buffer, which would defeat the headroom check.
    if (len >= bufSize || bufSize - len < 30)
        return;
    str = _malloc_append_unsigned(value, 10, str + len);
    str[0] = '\0';
}
1355
1356
// Best-effort search for the ivar of cls (or a superclass) whose storage
// covers byte `offset` within an instance. Superclasses are scanned first;
// their best candidate is used when offset falls before this class's first
// ivar. Result may be one ivar early for offsets inside padding or structs.
// Returns NULL only when no plausible ivar exists.
// NOTE(review): ivar_offset (ptrdiff_t) is compared against offset
// (vm_address_t, unsigned); fine for the small non-negative offsets used
// here, but the mixed signedness is worth keeping in mind.
static Ivar ivar_for_offset(Class cls, vm_address_t offset)
{
    int i;
    ptrdiff_t ivar_offset;
    Ivar super_ivar, result;
    Ivar *ivars;
    unsigned int ivar_count;

    if (!cls) return NULL;

    // scan base classes FIRST
    super_ivar = ivar_for_offset(class_getSuperclass(cls), offset);
    // result is best-effort; our ivars may be closer

    ivars = class_copyIvarList(cls, &ivar_count);
    if (ivars && ivar_count) {
        // Try our first ivar. If it's too big, use super's best ivar.
        // (lose 64-bit precision)
        ivar_offset = ivar_getOffset(ivars[0]);
        if (ivar_offset > offset) result = super_ivar;
        else if (ivar_offset == offset) result = ivars[0];
        else result = NULL;

        // Try our other ivars. If any is too big, use the previous.
        for (i = 1; result == NULL && i < ivar_count; i++) {
            ivar_offset = ivar_getOffset(ivars[i]);
            if (ivar_offset == offset) {
                result = ivars[i];
            } else if (ivar_offset > offset) {
                result = ivars[i - 1];
            }
        }

        // Found nothing. Return our last ivar.
        if (result == NULL)
            result = ivars[ivar_count - 1];

        free(ivars);
    } else {
        result = super_ivar;
    }

    return result;
}
1401
// Append a ".ivarName[+delta]" description of the ivar at `offset` within an
// instance of cls to buf (capacity bufSize). Offsets past the instance size
// render as ".<extra>+N"; an unresolvable offset renders as ".<?>". Offset 0
// (the isa) is skipped entirely.
static void append_ivar_at_offset(char *buf, Class cls, vm_address_t offset, size_t bufSize)
{
    Ivar ivar = NULL;

    if (offset == 0) return; // don't bother with isa
    if (offset >= class_getInstanceSize(cls)) {
        strlcat(buf, ".<extra>+", bufSize);
        strlcati(buf, offset, bufSize);
        return;
    }

    ivar = ivar_for_offset(cls, offset);
    if (!ivar) {
        strlcat(buf, ".<?>", bufSize);
        return;
    }

    // fixme doesn't handle structs etc.

    strlcat(buf, ".", bufSize);
    const char *ivar_name = ivar_getName(ivar);
    if (ivar_name) strlcat(buf, ivar_name, bufSize);
    else strlcat(buf, "<anonymous ivar>", bufSize);

    // Show the remaining offset into the ivar (e.g. inside a struct field).
    offset -= ivar_getOffset(ivar);
    if (offset > 0) {
        strlcat(buf, "+", bufSize);
        strlcati(buf, offset, bufSize);
    }
}
1432
1433
// Resolve a CF object's class name without linking against CoreFoundation:
// dlopen CF with RTLD_NOLOAD (succeeds only if CF is already loaded in the
// process) and look up CFGetTypeID / _CFRuntimeGetClassWithTypeID.
// Falls back to "anonymous_NSCFType" if CF or the symbols are unavailable.
// The returned name points into CF's class structure, which outlives the
// dlclose because RTLD_NOLOAD only dropped our extra reference.
static const char *cf_class_for_object(void *cfobj)
{
    // ick - we don't link against CF anymore

    const char *result;
    void *dlh;
    size_t (*CFGetTypeID)(void *);
    void * (*_CFRuntimeGetClassWithTypeID)(size_t);

    result = "anonymous_NSCFType";

    dlh = dlopen("/System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation", RTLD_LAZY | RTLD_NOLOAD | RTLD_FIRST);
    if (!dlh) return result;

    CFGetTypeID = (size_t(*)(void*)) dlsym(dlh, "CFGetTypeID");
    _CFRuntimeGetClassWithTypeID = (void*(*)(size_t)) dlsym(dlh, "_CFRuntimeGetClassWithTypeID");

    if (CFGetTypeID && _CFRuntimeGetClassWithTypeID) {
        // Mirror of the leading fields of CF's private class structure;
        // only version and className are needed here.
        struct {
            size_t version;
            const char *className;
            // don't care about the rest
        } *cfcls;
        size_t cfid;
        cfid = (*CFGetTypeID)(cfobj);
        cfcls = (*_CFRuntimeGetClassWithTypeID)(cfid);
        result = cfcls->className;
    }

    dlclose(dlh);
    return result;
}
1466
1467
// Build a human-readable description of the GC block at `base` (optionally
// the ivar at `offset` within it, and its retain count) for debug output.
// For objects, prints the class name (resolving CF toll-free types), the
// ivar path, and "[size]"; raw memory blocks get a bracketed kind tag.
// Returns a string allocated from objc_debug_zone(); caller owns it.
static char *name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset, int withRetainCount)
{
#define APPEND_SIZE(s) \
    strlcat(buf, "[", sizeof(buf)); \
    strlcati(buf, s, sizeof(buf)); \
    strlcat(buf, "]", sizeof(buf));

    char buf[1500];
    char *result;

    buf[0] = '\0';

    // A zero size means base is not a block in this zone; treat as unknown.
    size_t size =
        auto_zone_size(zone, (void *)base);
    auto_memory_type_t type = size ?
        auto_zone_get_layout_type(zone, (void *)base) : AUTO_TYPE_UNKNOWN;
    unsigned int refcount = size ?
        auto_zone_retain_count(zone, (void *)base) : 0;

    switch (type) {
    case AUTO_OBJECT_SCANNED:
    case AUTO_OBJECT_UNSCANNED:
    case AUTO_OBJECT_ALL_POINTERS: {
        const char *class_name = object_getClassName((id)base);
        // Toll-free bridged CF objects hide behind NSCFType; dig out the
        // real CF class name.
        if ((0 == strcmp(class_name, "__NSCFType")) || (0 == strcmp(class_name, "NSCFType"))) {
            strlcat(buf, cf_class_for_object((void *)base), sizeof(buf));
        } else {
            strlcat(buf, class_name, sizeof(buf));
        }
        if (offset) {
            append_ivar_at_offset(buf, _object_getClass((id)base), offset, sizeof(buf));
        }
        APPEND_SIZE(size);
        break;
    }
    case AUTO_MEMORY_SCANNED:
        strlcat(buf, "{conservative-block}", sizeof(buf));
        APPEND_SIZE(size);
        break;
    case AUTO_MEMORY_UNSCANNED:
        strlcat(buf, "{no-pointers-block}", sizeof(buf));
        APPEND_SIZE(size);
        break;
    case AUTO_MEMORY_ALL_POINTERS:
        strlcat(buf, "{all-pointers-block}", sizeof(buf));
        APPEND_SIZE(size);
        break;
    case AUTO_MEMORY_ALL_WEAK_POINTERS:
        strlcat(buf, "{all-weak-pointers-block}", sizeof(buf));
        APPEND_SIZE(size);
        break;
    case AUTO_TYPE_UNKNOWN:
        strlcat(buf, "{uncollectable-memory}", sizeof(buf));
        break;
    default:
        strlcat(buf, "{unknown-memory-type}", sizeof(buf));
    }

    if (withRetainCount && refcount > 0) {
        strlcat(buf, " [[refcount=", sizeof(buf));
        strlcati(buf, refcount, sizeof(buf));
        strlcat(buf, "]]", sizeof(buf));
    }

    result = malloc_zone_malloc(objc_debug_zone(), 1 + strlen(buf));
    // NOTE(review): the bound passed to strlcpy is sizeof(buf), not the
    // allocation size (1 + strlen(buf)). Safe today only because strlcpy
    // copies at most strlen(buf)+1 bytes here, which exactly fits; the
    // allocation size would be the clearer bound.
    strlcpy(result, buf, sizeof(buf));
    return result;

#undef APPEND_SIZE
}
1538
1539
1540
1541
1542
1543 #endif