[apple/objc4.git] / runtime / objc-runtime-new.m (objc4-437.3)
1 /*
2 * Copyright (c) 2005-2008 Apple Inc. All Rights Reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24 /***********************************************************************
25 * objc-runtime-new.m
26 * Support for new-ABI classes and images.
27 **********************************************************************/
28
29 #if __OBJC2__
30
31 #include "objc-private.h"
32 #include "objc-runtime-new.h"
33 #include <objc/message.h>
34
35 #define newcls(cls) ((struct class_t *)cls)
36 #define newcat(cat) ((struct category_t *)cat)
37 #define newmethod(meth) ((struct method_t *)meth)
38 #define newivar(ivar) ((struct ivar_t *)ivar)
39 #define newcategory(cat) ((struct category_t *)cat)
40 #define newprotocol(p) ((struct protocol_t *)p)
41
42 #ifdef __LP64__
43 #define WORD_SHIFT 3UL
44 #define WORD_MASK 7UL
45 #else
46 #define WORD_SHIFT 2UL
47 #define WORD_MASK 3UL
48 #endif
49
50 static const char *getName(struct class_t *cls);
51 static uint32_t instanceSize(struct class_t *cls);
52 static BOOL isMetaClass(struct class_t *cls);
53 static struct class_t *getSuperclass(struct class_t *cls);
54 static void unload_class(class_t *cls, BOOL isMeta);
55 static class_t *setSuperclass(class_t *cls, class_t *newSuper);
56 static class_t *realizeClass(class_t *cls);
57 static void flushCaches(class_t *cls);
58 static void flushVtables(class_t *cls);
59 static method_t *getMethodNoSuper_nolock(struct class_t *cls, SEL sel);
60 static method_t *getMethod_nolock(class_t *cls, SEL sel);
61 static void changeInfo(class_t *cls, unsigned int set, unsigned int clear);
62 static IMP _method_getImplementation(method_t *m);
63
64
65 /***********************************************************************
66 * Lock management
67 * Every lock used anywhere must be managed here.
68 * Locks not managed here may cause gdb deadlocks.
69 **********************************************************************/
70 __private_extern__ rwlock_t runtimeLock = {0};
71 __private_extern__ rwlock_t selLock = {0};
72 __private_extern__ mutex_t cacheUpdateLock = MUTEX_INITIALIZER;
73 __private_extern__ recursive_mutex_t loadMethodLock = RECURSIVE_MUTEX_INITIALIZER;
74 static int debugger_runtimeLock;
75 static int debugger_selLock;
76 static int debugger_cacheUpdateLock;
77 static int debugger_loadMethodLock;
78 #define RDONLY 1
79 #define RDWR 2
80
81 __private_extern__ void lock_init(void)
82 {
83 rwlock_init(&selLock);
84 rwlock_init(&runtimeLock);
85 recursive_mutex_init(&loadMethodLock);
86 }
87
88
89 /***********************************************************************
90 * startDebuggerMode
91 * Attempt to acquire some locks for debugger mode.
92 * Returns 0 if debugger mode failed because too many locks are unavailable.
93 *
94 * Locks successfully acquired are held until endDebuggerMode().
95 * Locks not acquired are off-limits until endDebuggerMode(); any
96 * attempt to manipulate them will cause a trap.
97 * Locks not handled here may cause deadlocks in gdb.
98 **********************************************************************/
99 __private_extern__ int startDebuggerMode(void)
100 {
101 int result = DEBUGGER_FULL;
102
103 // runtimeLock is required (can't do much without it)
104 if (rwlock_try_write(&runtimeLock)) {
105 debugger_runtimeLock = RDWR;
106 } else if (rwlock_try_read(&runtimeLock)) {
107 debugger_runtimeLock = RDONLY;
108 result = DEBUGGER_PARTIAL;
109 } else {
110 return DEBUGGER_OFF;
111 }
112
113 // cacheUpdateLock is required (must not fail a necessary cache flush)
114 // must be AFTER runtimeLock to avoid lock inversion
115 if (mutex_try_lock(&cacheUpdateLock)) {
116 debugger_cacheUpdateLock = RDWR;
117 } else {
118 rwlock_unlock(&runtimeLock, debugger_runtimeLock);
119 debugger_runtimeLock = 0;
120 return DEBUGGER_OFF;
121 }
122
123 // selLock is optional
124 if (rwlock_try_write(&selLock)) {
125 debugger_selLock = RDWR;
126 } else if (rwlock_try_read(&selLock)) {
127 debugger_selLock = RDONLY;
128 result = DEBUGGER_PARTIAL;
129 } else {
130 debugger_selLock = 0;
131 result = DEBUGGER_PARTIAL;
132 }
133
134 // loadMethodLock is optional
135 if (recursive_mutex_try_lock(&loadMethodLock)) {
136 debugger_loadMethodLock = RDWR;
137 } else {
138 debugger_loadMethodLock = 0;
139 result = DEBUGGER_PARTIAL;
140 }
141
142 return result;
143 }
144
145 /***********************************************************************
146 * endDebuggerMode
147 * Relinquish locks acquired in startDebuggerMode().
148 **********************************************************************/
149 __private_extern__ void endDebuggerMode(void)
150 {
151 assert(debugger_runtimeLock != 0);
152
153 rwlock_unlock(&runtimeLock, debugger_runtimeLock);
154 debugger_runtimeLock = 0;
155
156 rwlock_unlock(&selLock, debugger_selLock);
157 debugger_selLock = 0;
158
159 assert(debugger_cacheUpdateLock == RDWR);
160 mutex_unlock(&cacheUpdateLock);
161 debugger_cacheUpdateLock = 0;
162
163 if (debugger_loadMethodLock) {
164 recursive_mutex_unlock(&loadMethodLock);
165 debugger_loadMethodLock = 0;
166 }
167 }
168
169 /***********************************************************************
170 * isManagedDuringDebugger
171 * Returns YES if the given lock is handled specially during debugger
172 * mode (i.e. debugger mode tries to acquire it).
173 **********************************************************************/
174 __private_extern__ BOOL isManagedDuringDebugger(void *lock)
175 {
176 if (lock == &selLock) return YES;
177 if (lock == &cacheUpdateLock) return YES;
178 if (lock == &runtimeLock) return YES;
179 if (lock == &loadMethodLock) return YES;
180 return NO;
181 }
182
183 /***********************************************************************
184 * isLockedDuringDebugger
185 * Returns YES if the given mutex was acquired by debugger mode.
186 * Locking a managed mutex during debugger mode causes a trap unless
187 * this returns YES.
188 **********************************************************************/
189 __private_extern__ BOOL isLockedDuringDebugger(mutex_t *lock)
190 {
191 assert(DebuggerMode);
192
193 if (lock == &cacheUpdateLock) return YES;
194 if (lock == (mutex_t *)&loadMethodLock) return YES;
195
196 return NO;
197 }
198
199 /***********************************************************************
200 * isReadingDuringDebugger
201 * Returns YES if the given rwlock was read-locked by debugger mode.
202 * Read-locking a managed rwlock during debugger mode causes a trap unless
203 * this returns YES.
204 **********************************************************************/
205 __private_extern__ BOOL isReadingDuringDebugger(rwlock_t *lock)
206 {
207 assert(DebuggerMode);
208
209 // read-lock is allowed even if debugger mode actually write-locked it
210 if (debugger_runtimeLock && lock == &runtimeLock) return YES;
211 if (debugger_selLock && lock == &selLock) return YES;
212
213 return NO;
214 }
215
216 /***********************************************************************
217 * isWritingDuringDebugger
218 * Returns YES if the given rwlock was write-locked by debugger mode.
219 * Write-locking a managed rwlock during debugger mode causes a trap unless
220 * this returns YES.
221 **********************************************************************/
222 __private_extern__ BOOL isWritingDuringDebugger(rwlock_t *lock)
223 {
224 assert(DebuggerMode);
225
226 if (debugger_runtimeLock == RDWR && lock == &runtimeLock) return YES;
227 if (debugger_selLock == RDWR && lock == &selLock) return YES;
228
229 return NO;
230 }
231
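/*
  Illustrative sketch only, not part of the runtime: how lock glue might
  consult the predicates above while debugger mode is active.
  `mutex_lock_checked` is a hypothetical wrapper name used for illustration.

  static void mutex_lock_checked(mutex_t *lock)
  {
      if (DebuggerMode  &&  isManagedDuringDebugger(lock)) {
          // Managed locks are off-limits unless debugger mode already holds them.
          if (!isLockedDuringDebugger(lock)) {
              _objc_fatal("attempt to take a managed lock during debugger mode");
          }
          return;  // already held by debugger mode; don't re-lock
      }
      mutex_lock(lock);
  }
*/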
232
233 /***********************************************************************
234 * vtable dispatch
235 *
236 * Every class gets a vtable pointer. The vtable is an array of IMPs.
237 * The selectors represented in the vtable are the same for all classes
238 * (i.e. no class has a bigger or smaller vtable).
239 * Each vtable index has an associated trampoline which dispatches to
240 * the IMP at that index for the receiver class's vtable (after
241 * checking for NULL). Dispatch fixup uses these trampolines instead
242 * of objc_msgSend.
243 * Fragility: The vtable size and list of selectors is chosen at launch
244 * time. No compiler-generated code depends on any particular vtable
245 * configuration, or even the use of vtable dispatch at all.
246 * Memory size: If a class's vtable is identical to its superclass's
247 * (i.e. the class overrides none of the vtable selectors), then
248 * the class points directly to its superclass's vtable. This means
249 * selectors to be included in the vtable should be chosen so they are
250 * (1) frequently called, but (2) not too frequently overridden. In
251 * particular, -dealloc is a bad choice.
252 * Forwarding: If a class doesn't implement some vtable selector, that
253 * selector's IMP is set to objc_msgSend in that class's vtable.
254 * +initialize: Each class keeps the default vtable (which always
255 * redirects to objc_msgSend) until its +initialize is completed.
256 * Otherwise, the first message to a class could be a vtable dispatch,
257 * and the vtable trampoline doesn't include +initialize checking.
258 * Changes: Categories, addMethod, and setImplementation all force vtable
259 * reconstruction for the class and all of its subclasses, if the
260 * vtable selectors are affected.
261 **********************************************************************/
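/*
  Conceptual C sketch of what one trampoline effectively does (illustration
  only; the real trampolines are hand-written assembly, see vtable_prototype
  and makeVtableTrampoline below, and they also reload _cmd for the callee):

  id dispatch_via_vtable_index(id obj, SEL sel, size_t index)
  {
      if (!obj) return nil;                               // nil-receiver check
      struct class_t *cls = (struct class_t *)obj->isa;
      IMP imp = cls->vtable[index];   // never NULL: unimplemented slots hold objc_msgSend
      return imp(obj, sel);           // either the method's IMP or the objc_msgSend fallback
  }
*/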
262
263 #define X8(x) \
264 x, x, x, x, x, x, x, x
265 #define X64(x) \
266 X8(x), X8(x), X8(x), X8(x), X8(x), X8(x), X8(x), X8(x)
267 #define X128(x) \
268 X64(x), X64(x)
269
270 #define vtableMax 128
271
272 IMP _objc_empty_vtable[vtableMax] = {
273 X128(objc_msgSend)
274 };
275
276 #ifndef NO_VTABLE
277
278 // Trampoline descriptors for gdb.
279
280 objc_trampoline_header *gdb_objc_trampolines = NULL;
281
282 void gdb_objc_trampolines_changed(objc_trampoline_header *thdr) __attribute__((noinline));
283 void gdb_objc_trampolines_changed(objc_trampoline_header *thdr)
284 {
285 rwlock_assert_writing(&runtimeLock);
286 assert(thdr == gdb_objc_trampolines);
287
288 if (PrintVtables) {
289 _objc_inform("VTABLES: gdb_objc_trampolines_changed(%p)", thdr);
290 }
291 }
292
293 // fixme workaround for rdar://6667753
294 static void appendTrampolines(objc_trampoline_header *thdr) __attribute__((noinline));
295
296 static void appendTrampolines(objc_trampoline_header *thdr)
297 {
298 rwlock_assert_writing(&runtimeLock);
299 assert(thdr->next == NULL);
300
301 if (gdb_objc_trampolines != thdr->next) {
302 thdr->next = gdb_objc_trampolines;
303 }
304 gdb_objc_trampolines = thdr;
305
306 gdb_objc_trampolines_changed(thdr);
307 }
308
309 // Vtable management.
310
311 static size_t vtableStrlen;
312 static size_t vtableCount;
313 static SEL *vtableSelectors;
314 static IMP *vtableTrampolines;
315 static const char * const defaultVtable[] = {
316 "allocWithZone:",
317 "alloc",
318 "class",
319 "self",
320 "isKindOfClass:",
321 "respondsToSelector:",
322 "isFlipped",
323 "length",
324 "objectForKey:",
325 "count",
326 "objectAtIndex:",
327 "isEqualToString:",
328 "isEqual:",
329 "retain",
330 "release",
331 "autorelease",
332 };
333 static const char * const defaultVtableGC[] = {
334 "allocWithZone:",
335 "alloc",
336 "class",
337 "self",
338 "isKindOfClass:",
339 "respondsToSelector:",
340 "isFlipped",
341 "length",
342 "objectForKey:",
343 "count",
344 "objectAtIndex:",
345 "isEqualToString:",
346 "isEqual:",
347 "hash",
348 "addObject:",
349 "countByEnumeratingWithState:objects:count:",
350 };
351
352 extern id objc_msgSend_vtable0(id, SEL, ...);
353 extern id objc_msgSend_vtable1(id, SEL, ...);
354 extern id objc_msgSend_vtable2(id, SEL, ...);
355 extern id objc_msgSend_vtable3(id, SEL, ...);
356 extern id objc_msgSend_vtable4(id, SEL, ...);
357 extern id objc_msgSend_vtable5(id, SEL, ...);
358 extern id objc_msgSend_vtable6(id, SEL, ...);
359 extern id objc_msgSend_vtable7(id, SEL, ...);
360 extern id objc_msgSend_vtable8(id, SEL, ...);
361 extern id objc_msgSend_vtable9(id, SEL, ...);
362 extern id objc_msgSend_vtable10(id, SEL, ...);
363 extern id objc_msgSend_vtable11(id, SEL, ...);
364 extern id objc_msgSend_vtable12(id, SEL, ...);
365 extern id objc_msgSend_vtable13(id, SEL, ...);
366 extern id objc_msgSend_vtable14(id, SEL, ...);
367 extern id objc_msgSend_vtable15(id, SEL, ...);
368
369 static IMP const defaultVtableTrampolines[] = {
370 objc_msgSend_vtable0,
371 objc_msgSend_vtable1,
372 objc_msgSend_vtable2,
373 objc_msgSend_vtable3,
374 objc_msgSend_vtable4,
375 objc_msgSend_vtable5,
376 objc_msgSend_vtable6,
377 objc_msgSend_vtable7,
378 objc_msgSend_vtable8,
379 objc_msgSend_vtable9,
380 objc_msgSend_vtable10,
381 objc_msgSend_vtable11,
382 objc_msgSend_vtable12,
383 objc_msgSend_vtable13,
384 objc_msgSend_vtable14,
385 objc_msgSend_vtable15,
386 };
387 extern objc_trampoline_header defaultVtableTrampolineDescriptors;
388
389 static void check_vtable_size(void) __unused;
390 static void check_vtable_size(void)
391 {
392 // Fail to compile if vtable sizes don't match.
393 int c1[sizeof(defaultVtableTrampolines)-sizeof(defaultVtable)] __unused;
394 int c2[sizeof(defaultVtable)-sizeof(defaultVtableTrampolines)] __unused;
395 int c3[sizeof(defaultVtableTrampolines)-sizeof(defaultVtableGC)] __unused;
396 int c4[sizeof(defaultVtableGC)-sizeof(defaultVtableTrampolines)] __unused;
397
398 // Fail to compile if vtableMax is too small
399 int c5[vtableMax - sizeof(defaultVtable)] __unused;
400 int c6[vtableMax - sizeof(defaultVtableGC)] __unused;
401 }
402
403 /*
404 x86_64
405
406 monomorphic (self rdi, sel* rsi, temp r10 and r11) {
407 test %rdi, %rdi
408 jeq returnZero // nil check
409 movq 8(%rsi), %rsi // load _cmd (fixme schedule)
410 movq $xxxx, %r10
411 cmp 0(%rdi), %r10 // isa check
412 jeq imp // fixme long branches
413 movq $yyyy, %r10
414 cmp 0(%rdi), %r10 // fixme load rdi once for multiple isas
415 jeq imp2 // fixme long branches
416 jmp objc_msgSend // fixme long branches
417 }
418
419 */
420 extern uint8_t vtable_prototype;
421 extern uint8_t vtable_ignored;
422 extern int vtable_prototype_size;
423 extern int vtable_prototype_index_offset;
424 static size_t makeVtableTrampoline(uint8_t *dst, size_t index)
425 {
426 // copy boilerplate
427 memcpy(dst, &vtable_prototype, vtable_prototype_size);
428
429 // insert index
430 #if defined(__x86_64__)
431 uint16_t *p = (uint16_t *)(dst + vtable_prototype_index_offset + 3);
432 if (*p != 0x7fff) _objc_fatal("vtable_prototype busted");
433 *p = index * 8;
434 #else
435 # warning unknown architecture
436 #endif
437
438 return vtable_prototype_size;
439 }
440
441
442 static void initVtables(void)
443 {
444 if (DisableVtables) {
445 if (PrintVtables) {
446 _objc_inform("VTABLES: vtable dispatch disabled by OBJC_DISABLE_VTABLES");
447 }
448 vtableCount = 0;
449 vtableSelectors = NULL;
450 vtableTrampolines = NULL;
451 return;
452 }
453
454 const char * const *names;
455 size_t i;
456
457 if (UseGC) {
458 names = defaultVtableGC;
459 vtableCount = sizeof(defaultVtableGC) / sizeof(defaultVtableGC[0]);
460 } else {
461 names = defaultVtable;
462 vtableCount = sizeof(defaultVtable) / sizeof(defaultVtable[0]);
463 }
464 if (vtableCount > vtableMax) vtableCount = vtableMax;
465
466 vtableSelectors = _malloc_internal(vtableCount * sizeof(SEL));
467 vtableTrampolines = _malloc_internal(vtableCount * sizeof(IMP));
468
469 // Built-in trampolines and their descriptors
470
471 size_t defaultVtableTrampolineCount =
472 sizeof(defaultVtableTrampolines) / sizeof(defaultVtableTrampolines[0]);
473 #ifndef NDEBUG
474 // debug: use generated code for 3/4 of the table
475 defaultVtableTrampolineCount /= 4;
476 #endif
477
478 for (i = 0; i < defaultVtableTrampolineCount && i < vtableCount; i++) {
479 vtableSelectors[i] = sel_registerName(names[i]);
480 vtableTrampolines[i] = defaultVtableTrampolines[i];
481 }
482 appendTrampolines(&defaultVtableTrampolineDescriptors);
483
484
485 // Generated trampolines and their descriptors
486
487 if (vtableCount > defaultVtableTrampolineCount) {
488 // Memory for trampoline code
489 size_t generatedCount =
490 vtableCount - defaultVtableTrampolineCount;
491
492 const int align = 16;
493 size_t codeSize =
494 round_page(sizeof(objc_trampoline_header) + align +
495 generatedCount * (sizeof(objc_trampoline_descriptor)
496 + vtable_prototype_size + align));
497 void *codeAddr = mmap(0, codeSize, PROT_READ|PROT_WRITE,
498 MAP_PRIVATE|MAP_ANON,
499 VM_MAKE_TAG(VM_MEMORY_OBJC_DISPATCHERS), 0);
500 uint8_t *t = (uint8_t *)codeAddr;
501
502 // Trampoline header
503 objc_trampoline_header *thdr = (objc_trampoline_header *)t;
504 thdr->headerSize = sizeof(objc_trampoline_header);
505 thdr->descSize = sizeof(objc_trampoline_descriptor);
506 thdr->descCount = (uint32_t)generatedCount;
507 thdr->next = NULL;
508
509 // Trampoline descriptors
510 objc_trampoline_descriptor *tdesc = (objc_trampoline_descriptor *)(thdr+1);
511 t = (uint8_t *)&tdesc[generatedCount];
512 t += align - ((uintptr_t)t % align);
513
514 // Dispatch code
515 size_t tdi;
516 for (i = defaultVtableTrampolineCount, tdi = 0;
517 i < vtableCount;
518 i++, tdi++)
519 {
520 vtableSelectors[i] = sel_registerName(names[i]);
521 if (vtableSelectors[i] == (SEL)kIgnore) {
522 vtableTrampolines[i] = (IMP)&vtable_ignored;
523 tdesc[tdi].offset = 0;
524 tdesc[tdi].flags = 0;
525 } else {
526 vtableTrampolines[i] = (IMP)t;
527 tdesc[tdi].offset =
528 (uint32_t)((uintptr_t)t - (uintptr_t)&tdesc[tdi]);
529 tdesc[tdi].flags =
530 OBJC_TRAMPOLINE_MESSAGE|OBJC_TRAMPOLINE_VTABLE;
531
532 t += makeVtableTrampoline(t, i);
533 t += align - ((uintptr_t)t % align);
534 }
535 }
536
537 appendTrampolines(thdr);
538 sys_icache_invalidate(codeAddr, codeSize);
539 mprotect(codeAddr, codeSize, PROT_READ|PROT_EXEC);
540 }
541
542
543 if (PrintVtables) {
544 for (i = 0; i < vtableCount; i++) {
545 _objc_inform("VTABLES: vtable[%zu] %p %s",
546 i, vtableTrampolines[i],
547 sel_getName(vtableSelectors[i]));
548 }
549 }
550
551 if (PrintVtableImages) {
552 _objc_inform("VTABLE IMAGES: '#' implemented by class");
553 _objc_inform("VTABLE IMAGES: '-' inherited from superclass");
554 _objc_inform("VTABLE IMAGES: ' ' not implemented");
555 for (i = 0; i <= vtableCount; i++) {
556 char spaces[vtableCount+1+1];
557 size_t j;
558 for (j = 0; j < i; j++) {
559 spaces[j] = '|';
560 }
561 spaces[j] = '\0';
562 _objc_inform("VTABLE IMAGES: %s%s", spaces,
563 i<vtableCount ? sel_getName(vtableSelectors[i]) : "");
564 }
565 }
566
567 if (PrintVtables || PrintVtableImages) {
568 vtableStrlen = 0;
569 for (i = 0; i < vtableCount; i++) {
570 vtableStrlen += strlen(sel_getName(vtableSelectors[i]));
571 }
572 }
573 }
574
575
576 static int vtable_getIndex(SEL sel)
577 {
578 int i;
579 for (i = 0; i < vtableCount; i++) {
580 if (vtableSelectors[i] == sel) return i;
581 }
582 return -1;
583 }
584
585 static BOOL vtable_containsSelector(SEL sel)
586 {
587 return (vtable_getIndex(sel) < 0) ? NO : YES;
588 }
589
590 static void printVtableOverrides(class_t *cls, class_t *supercls)
591 {
592 char overrideMap[vtableCount+1];
593 int i;
594
595 if (supercls) {
596 size_t overridesBufferSize = vtableStrlen + 2*vtableCount + 1;
597 char *overrides =
598 _calloc_internal(overridesBufferSize, 1);
599 for (i = 0; i < vtableCount; i++) {
600 if (vtableSelectors[i] == (SEL)kIgnore) {
601 overrideMap[i] = '-';
602 continue;
603 }
604 if (getMethodNoSuper_nolock(cls, vtableSelectors[i])) {
605 strlcat(overrides, sel_getName(vtableSelectors[i]), overridesBufferSize);
606 strlcat(overrides, ", ", overridesBufferSize);
607 overrideMap[i] = '#';
608 } else if (getMethod_nolock(cls, vtableSelectors[i])) {
609 overrideMap[i] = '-';
610 } else {
611 overrideMap[i] = ' ';
612 }
613 }
614 if (PrintVtables) {
615 _objc_inform("VTABLES: %s%s implements %s",
616 getName(cls), isMetaClass(cls) ? "(meta)" : "",
617 overrides);
618 }
619 _free_internal(overrides);
620 }
621 else {
622 for (i = 0; i < vtableCount; i++) {
623 overrideMap[i] = '#';
624 }
625 }
626
627 if (PrintVtableImages) {
628 overrideMap[vtableCount] = '\0';
629 _objc_inform("VTABLE IMAGES: %s %s%s", overrideMap,
630 getName(cls), isMetaClass(cls) ? "(meta)" : "");
631 }
632 }
633
634 /***********************************************************************
635 * updateVtable
636 * Rebuilds vtable for cls, using superclass's vtable if appropriate.
637 * Assumes superclass's vtable is up to date.
638 * Does nothing to subclass vtables.
639 * Locking: runtimeLock must be held by the caller.
640 **********************************************************************/
641 static void updateVtable(class_t *cls, BOOL force)
642 {
643 rwlock_assert_writing(&runtimeLock);
644
645 // Keep default vtable until +initialize is complete.
646 // Default vtable redirects to objc_msgSend, which
647 // enforces +initialize locking.
648 if (!force && !_class_isInitialized((Class)cls)) {
649 /*
650 if (PrintVtables) {
651 _objc_inform("VTABLES: KEEPING DEFAULT vtable for "
652 "uninitialized class %s%s",
653 getName(cls), isMetaClass(cls) ? "(meta)" : "");
654 }
655 */
656 return;
657 }
658
659 // Decide whether this class can share its superclass's vtable.
660
661 struct class_t *supercls = getSuperclass(cls);
662 BOOL needVtable = NO;
663 int i;
664 if (!supercls) {
665 // Root classes always need a vtable
666 needVtable = YES;
667 }
668 else if (cls->data->flags & RW_SPECIALIZED_VTABLE) {
669 // Once you have your own vtable, you never go back
670 needVtable = YES;
671 }
672 else {
673 for (i = 0; i < vtableCount; i++) {
674 if (vtableSelectors[i] == (SEL)kIgnore) continue;
675 method_t *m = getMethodNoSuper_nolock(cls, vtableSelectors[i]);
676 // assume any local implementation differs from super's
677 if (m) {
678 needVtable = YES;
679 break;
680 }
681 }
682 }
683
684 // Build a vtable for this class, or not.
685
686 if (!needVtable) {
687 if (PrintVtables) {
688 _objc_inform("VTABLES: USING SUPERCLASS vtable for class %s%s",
689 getName(cls), isMetaClass(cls) ? "(meta)" : "");
690 }
691 cls->vtable = supercls->vtable;
692 }
693 else {
694 if (PrintVtables) {
695 _objc_inform("VTABLES: %s vtable for class %s%s",
696 (cls->data->flags & RW_SPECIALIZED_VTABLE) ?
697 "UPDATING SPECIALIZED" : "CREATING SPECIALIZED",
698 getName(cls), isMetaClass(cls) ? "(meta)" : "");
699 }
700 if (PrintVtables || PrintVtableImages) {
701 printVtableOverrides(cls, supercls);
702 }
703
704 IMP *new_vtable = cls->vtable;
705 IMP *super_vtable = supercls ? supercls->vtable : _objc_empty_vtable;
706 // fixme use msgForward (instead of msgSend from empty vtable) ?
707
708 if (cls->data->flags & RW_SPECIALIZED_VTABLE) {
709 // update cls->vtable in place
710 new_vtable = cls->vtable;
711 assert(new_vtable != _objc_empty_vtable);
712 } else {
713 // make new vtable
714 new_vtable = malloc(vtableCount * sizeof(IMP));
715 changeInfo(cls, RW_SPECIALIZED_VTABLE, 0);
716 }
717
718 for (i = 0; i < vtableCount; i++) {
719 if (vtableSelectors[i] == (SEL)kIgnore) {
720 new_vtable[i] = (IMP)&vtable_ignored;
721 } else {
722 method_t *m = getMethodNoSuper_nolock(cls, vtableSelectors[i]);
723 if (m) new_vtable[i] = _method_getImplementation(m);
724 else new_vtable[i] = super_vtable[i];
725 }
726 }
727
728 if (cls->vtable != new_vtable) {
729 // don't let other threads see uninitialized parts of new_vtable
730 OSMemoryBarrier();
731 cls->vtable = new_vtable;
732 }
733 }
734 }
735
736 // ! NO_VTABLE
737 #else
738 // NO_VTABLE
739
740 static void initVtables(void)
741 {
742 if (PrintVtables) {
743 _objc_inform("VTABLES: no vtables on this architecture");
744 }
745 }
746
747 static BOOL vtable_containsSelector(SEL sel)
748 {
749 return NO;
750 }
751
752 static void updateVtable(class_t *cls, BOOL force)
753 {
754 }
755
756 // NO_VTABLE
757 #endif
758
759 typedef struct {
760 category_t *cat;
761 BOOL fromBundle;
762 } category_pair_t;
763
764 typedef struct {
765 uint32_t count;
766 category_pair_t list[0]; // variable-size
767 } category_list;
768
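/*
  Allocation sketch for the flexible list[0] member above (illustration):
  a category_list holding N entries occupies
      sizeof(category_list) + N * sizeof(category_pair_t)
  bytes; addUnattachedCategoryForClass() below grows it one entry at a
  time with _calloc_internal / _realloc_internal.
*/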
769 #define FOREACH_METHOD_LIST(_mlist, _cls, code) \
770 do { \
771 const method_list_t *_mlist; \
772 if (_cls->data->methods) { \
773 method_list_t **_mlistp; \
774 for (_mlistp = _cls->data->methods; *_mlistp; _mlistp++) { \
775 _mlist = *_mlistp; \
776 code \
777 } \
778 } \
779 } while (0)
780
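/*
  Usage sketch (illustration only): walk every attached method list of a
  class, e.g. to count its methods. Expects runtimeLock to be held, as for
  the other uses of this macro below.

  uint32_t total = 0;
  FOREACH_METHOD_LIST(mlist, cls, {
      total += mlist->count;
  });
*/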
781
782 // fixme don't chain property lists
783 typedef struct chained_property_list {
784 struct chained_property_list *next;
785 uint32_t count;
786 struct objc_property list[0]; // variable-size
787 } chained_property_list;
788
789 /*
790 Low two bits of mlist->entsize is used as the fixed-up marker.
791 PREOPTIMIZED VERSION:
792 Fixed-up method lists get entsize&3 == 3.
793 dyld shared cache sets this for method lists it preoptimizes.
794 UN-PREOPTIMIZED VERSION:
795 Fixed-up method lists get entsize&3 == 1.
796 dyld shared cache uses 3, but those aren't trusted.
797 */
798
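/*
  Worked example of the marker bits (illustrative sizes): with 24-byte
  entries (three pointers on LP64) an unfixed list has
  entsize_NEVER_USE == 24, low two bits 00. setMethodListFixedUp() turns
  that into 24|3 == 27 while shared-cache preoptimization is trusted, or
  24|1 == 25 after disableSelectorPreoptimization(). isMethodListFixedUp()
  only accepts the marker matching fixed_up_method_list, and
  method_list_entsize() masks the low two bits off again.
*/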
799 static uint32_t fixed_up_method_list = 3;
800
801 __private_extern__ void
802 disableSelectorPreoptimization(void)
803 {
804 fixed_up_method_list = 1;
805 }
806
807 static BOOL isMethodListFixedUp(const method_list_t *mlist)
808 {
809 return (mlist->entsize_NEVER_USE & 3) == fixed_up_method_list;
810 }
811
812 static void setMethodListFixedUp(method_list_t *mlist)
813 {
814 rwlock_assert_writing(&runtimeLock);
815 assert(!isMethodListFixedUp(mlist));
816 mlist->entsize_NEVER_USE = (mlist->entsize_NEVER_USE & ~3) | fixed_up_method_list;
817 }
818
819 /*
820 static size_t chained_property_list_size(const chained_property_list *plist)
821 {
822 return sizeof(chained_property_list) +
823 plist->count * sizeof(struct objc_property);
824 }
825
826 static size_t protocol_list_size(const protocol_list_t *plist)
827 {
828 return sizeof(protocol_list_t) + plist->count * sizeof(protocol_t *);
829 }
830 */
831
832 // low bit used by dyld shared cache
833 static uint32_t method_list_entsize(const method_list_t *mlist)
834 {
835 return mlist->entsize_NEVER_USE & ~(uint32_t)3;
836 }
837
838 static size_t method_list_size(const method_list_t *mlist)
839 {
840 return sizeof(method_list_t) + (mlist->count-1)*method_list_entsize(mlist);
841 }
842
843 static method_t *method_list_nth(const method_list_t *mlist, uint32_t i)
844 {
845 return (method_t *)(i*method_list_entsize(mlist) + (char *)&mlist->first);
846 }
847
848
849 static size_t ivar_list_size(const ivar_list_t *ilist)
850 {
851 return sizeof(ivar_list_t) + (ilist->count-1) * ilist->entsize;
852 }
853
854 static ivar_t *ivar_list_nth(const ivar_list_t *ilist, uint32_t i)
855 {
856 return (ivar_t *)(i*ilist->entsize + (char *)&ilist->first);
857 }
858
859
860 static method_list_t *cat_method_list(const category_t *cat, BOOL isMeta)
861 {
862 if (!cat) return NULL;
863
864 if (isMeta) return cat->classMethods;
865 else return cat->instanceMethods;
866 }
867
868 static uint32_t cat_method_count(const category_t *cat, BOOL isMeta)
869 {
870 method_list_t *cmlist = cat_method_list(cat, isMeta);
871 return cmlist ? cmlist->count : 0;
872 }
873
874 static method_t *cat_method_nth(const category_t *cat, BOOL isMeta, uint32_t i)
875 {
876 method_list_t *cmlist = cat_method_list(cat, isMeta);
877 if (!cmlist) return NULL;
878
879 return method_list_nth(cmlist, i);
880 }
881
882
883 // part of ivar_t, with non-deprecated alignment
884 typedef struct {
885 uintptr_t *offset;
886 const char *name;
887 const char *type;
888 uint32_t alignment;
889 } ivar_alignment_t;
890
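/*
  Worked example (illustrative): the alignment field stores log2 of the
  byte alignment, so a stored value of 3 means 1<<3 == 8-byte alignment.
  The sentinel (uint32_t)-1 means "unspecified" and falls back to word
  alignment: 1<<WORD_SHIFT == 8 bytes on LP64, 4 bytes on 32-bit.
*/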
891 static uint32_t ivar_alignment(const ivar_t *ivar)
892 {
893 uint32_t alignment = ((ivar_alignment_t *)ivar)->alignment;
894 if (alignment == (uint32_t)-1) alignment = (uint32_t)WORD_SHIFT;
895 return 1<<alignment;
896 }
897
898
899 static void try_free(const void *p)
900 {
901 if (p && malloc_size(p)) free((void *)p);
902 }
903
904
905 /***********************************************************************
906 * make_ro_writeable
907 * Reallocates rw->ro if necessary to make it writeable.
908 * Locking: runtimeLock must be held by the caller.
909 **********************************************************************/
910 static class_ro_t *make_ro_writeable(class_rw_t *rw)
911 {
912 rwlock_assert_writing(&runtimeLock);
913
914 if (rw->flags & RW_COPIED_RO) {
915 // already writeable, do nothing
916 } else {
917 class_ro_t *ro = _memdup_internal(rw->ro, sizeof(*rw->ro));
918 rw->ro = ro;
919 rw->flags |= RW_COPIED_RO;
920 }
921 return (class_ro_t *)rw->ro;
922 }
923
924
925 /***********************************************************************
926 * unattachedCategories
927 * Returns the class => categories map of unattached categories.
928 * Locking: runtimeLock must be held by the caller.
929 **********************************************************************/
930 static NXMapTable *unattachedCategories(void)
931 {
932 rwlock_assert_writing(&runtimeLock);
933
934 static NXMapTable *category_map = NULL;
935
936 if (category_map) return category_map;
937
938 // fixme initial map size
939 category_map = NXCreateMapTableFromZone(NXPtrValueMapPrototype, 16,
940 _objc_internal_zone());
941
942 return category_map;
943 }
944
945
946 /***********************************************************************
947 * addUnattachedCategoryForClass
948 * Records an unattached category.
949 * Locking: runtimeLock must be held by the caller.
950 **********************************************************************/
951 static void addUnattachedCategoryForClass(category_t *cat, class_t *cls,
952 header_info *catHeader)
953 {
954 rwlock_assert_writing(&runtimeLock);
955
956 BOOL catFromBundle = (catHeader->mhdr->filetype == MH_BUNDLE) ? YES: NO;
957
958 // DO NOT use cat->cls!
959 // cls may be cat->cls->isa, or cat->cls may have been remapped.
960 NXMapTable *cats = unattachedCategories();
961 category_list *list;
962
963 list = NXMapGet(cats, cls);
964 if (!list) {
965 list = _calloc_internal(sizeof(*list) + sizeof(list->list[0]), 1);
966 } else {
967 list = _realloc_internal(list, sizeof(*list) + sizeof(list->list[0]) * (list->count + 1));
968 }
969 list->list[list->count++] = (category_pair_t){cat, catFromBundle};
970 NXMapInsert(cats, cls, list);
971 }
972
973
974 /***********************************************************************
975 * removeUnattachedCategoryForClass
976 * Removes an unattached category.
977 * Locking: runtimeLock must be held by the caller.
978 **********************************************************************/
979 static void removeUnattachedCategoryForClass(category_t *cat, class_t *cls)
980 {
981 rwlock_assert_writing(&runtimeLock);
982
983 // DO NOT use cat->cls!
984 // cls may be cat->cls->isa, or cat->cls may have been remapped.
985 NXMapTable *cats = unattachedCategories();
986 category_list *list;
987
988 list = NXMapGet(cats, cls);
989 if (!list) return;
990
991 uint32_t i;
992 for (i = 0; i < list->count; i++) {
993 if (list->list[i].cat == cat) {
994 // shift entries to preserve list order
995 memmove(&list->list[i], &list->list[i+1],
996 (list->count-i-1) * sizeof(list->list[i]));
997 list->count--;
998 return;
999 }
1000 }
1001 }
1002
1003
1004 /***********************************************************************
1005 * unattachedCategoriesForClass
1006 * Returns the list of unattached categories for a class, and
1007  * removes them from the unattached-categories map.
1008 * The result must be freed by the caller.
1009 * Locking: runtimeLock must be held by the caller.
1010 **********************************************************************/
1011 static category_list *unattachedCategoriesForClass(class_t *cls)
1012 {
1013 rwlock_assert_writing(&runtimeLock);
1014 return NXMapRemove(unattachedCategories(), cls);
1015 }
1016
1017
1018 /***********************************************************************
1019 * isRealized
1020 * Returns YES if class cls has been realized.
1021 * Locking: To prevent concurrent realization, hold runtimeLock.
1022 **********************************************************************/
1023 static BOOL isRealized(class_t *cls)
1024 {
1025 return (cls->data->flags & RW_REALIZED) ? YES : NO;
1026 }
1027
1028
1029 /***********************************************************************
1030 * isFuture
1031 * Returns YES if class cls is an unrealized future class.
1032 * Locking: To prevent concurrent realization, hold runtimeLock.
1033 **********************************************************************/
1034 static BOOL isFuture(class_t *cls)
1035 {
1036 return (cls->data->flags & RW_FUTURE) ? YES : NO;
1037 }
1038
1039
1040 /***********************************************************************
1041 * printReplacements
1042 * Implementation of PrintReplacedMethods / OBJC_PRINT_REPLACED_METHODS.
1043 * Warn about methods from cats that override other methods in cats or cls.
1044 * Assumes no methods from cats have been added to cls yet.
1045 **********************************************************************/
1046 static void printReplacements(class_t *cls, category_list *cats)
1047 {
1048 uint32_t c;
1049 BOOL isMeta = isMetaClass(cls);
1050
1051 if (!cats) return;
1052
1053 // Newest categories are LAST in cats
1054 // Later categories override earlier ones.
1055 for (c = 0; c < cats->count; c++) {
1056 category_t *cat = cats->list[c].cat;
1057 uint32_t cmCount = cat_method_count(cat, isMeta);
1058 uint32_t m;
1059 for (m = 0; m < cmCount; m++) {
1060 uint32_t c2, m2;
1061 method_t *meth2 = NULL;
1062 method_t *meth = cat_method_nth(cat, isMeta, m);
1063 SEL s = sel_registerName((const char *)meth->name);
1064
1065 // Don't warn about GC-ignored selectors
1066 if (s == (SEL)kIgnore) continue;
1067
1068 // Look for method in earlier categories
1069 for (c2 = 0; c2 < c; c2++) {
1070 category_t *cat2 = cats->list[c2].cat;
1071 uint32_t cm2Count = cat_method_count(cat2, isMeta);
1072 for (m2 = 0; m2 < cm2Count; m2++) {
1073 meth2 = cat_method_nth(cat2, isMeta, m2);
1074 SEL s2 = sel_registerName((const char *)meth2->name);
1075 if (s == s2) goto whine;
1076 }
1077 }
1078
1079 // Look for method in cls
1080 FOREACH_METHOD_LIST(mlist, cls, {
1081 for (m2 = 0; m2 < mlist->count; m2++) {
1082 meth2 = method_list_nth(mlist, m2);
1083 SEL s2 = sel_registerName((const char *)meth2->name);
1084 if (s == s2) goto whine;
1085 }
1086 });
1087
1088 // Didn't find any override.
1089 continue;
1090
1091 whine:
1092 // Found an override.
1093 logReplacedMethod(getName(cls), s, isMetaClass(cls), cat->name,
1094 _method_getImplementation(meth2),
1095 _method_getImplementation(meth));
1096 }
1097 }
1098 }
1099
1100
1101 static BOOL isBundleClass(class_t *cls)
1102 {
1103 return (cls->data->ro->flags & RO_FROM_BUNDLE) ? YES : NO;
1104 }
1105
1106
1107 static void
1108 fixupMethodList(method_list_t *mlist, BOOL bundleCopy)
1109 {
1110 assert(!isMethodListFixedUp(mlist));
1111
1112 // fixme lock less in attachMethodLists ?
1113 sel_lock();
1114
1115 uint32_t m;
1116 for (m = 0; m < mlist->count; m++) {
1117 method_t *meth = method_list_nth(mlist, m);
1118 SEL sel = sel_registerNameNoLock((const char *)meth->name, bundleCopy);
1119 meth->name = sel;
1120
1121 if (sel == (SEL)kIgnore) {
1122 meth->imp = (IMP)&_objc_ignored_method;
1123 }
1124 }
1125
1126 sel_unlock();
1127
1128 setMethodListFixedUp(mlist);
1129 }
1130
1131 static void
1132 attachMethodLists(class_t *cls, method_list_t **lists, int count,
1133 BOOL methodsFromBundle, BOOL *outVtablesAffected)
1134 {
1135 rwlock_assert_writing(&runtimeLock);
1136
1137 BOOL vtablesAffected = NO;
1138 size_t listsSize = count * sizeof(*lists);
1139
1140 // Create or extend method list array
1141 // Leave `count` empty slots at the start of the array to be filled below.
1142
1143 if (!cls->data->methods) {
1144 // no bonus method lists yet
1145 cls->data->methods = _calloc_internal(1 + count, sizeof(*lists));
1146 } else {
1147 size_t oldSize = malloc_size(cls->data->methods);
1148 cls->data->methods =
1149 _realloc_internal(cls->data->methods, oldSize + listsSize);
1150 memmove(cls->data->methods + count, cls->data->methods, oldSize);
1151 }
1152
1153 // Add method lists to array.
1154 // Reallocate un-fixed method lists.
1155
1156 int i;
1157 for (i = 0; i < count; i++) {
1158 method_list_t *mlist = lists[i];
1159 if (!mlist) continue;
1160
1161 // Fixup selectors if necessary
1162 if (!isMethodListFixedUp(mlist)) {
1163 mlist = _memdup_internal(mlist, method_list_size(mlist));
1164 fixupMethodList(mlist, methodsFromBundle);
1165 }
1166
1167 // Scan for vtable updates
1168 if (outVtablesAffected && !vtablesAffected) {
1169 uint32_t m;
1170 for (m = 0; m < mlist->count; m++) {
1171 SEL sel = method_list_nth(mlist, m)->name;
1172 if (vtable_containsSelector(sel)) vtablesAffected = YES;
1173 }
1174 }
1175
1176 // Fill method list array
1177 cls->data->methods[i] = mlist;
1178 }
1179
1180 if (outVtablesAffected) *outVtablesAffected = vtablesAffected;
1181 }
1182
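/*
  Layout sketch for cls->data->methods after attaching `count` new lists
  (illustration): the realloc+memmove above opens `count` slots at the
  front, so the newest lists are searched first.

  before:  [old0, old1, NULL]
  after:   [new0, ... new(count-1), old0, old1, NULL]
*/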
1183 static void
1184 attachCategoryMethods(class_t *cls, category_list *cats,
1185 BOOL *outVtablesAffected)
1186 {
1187 if (!cats) return;
1188 if (PrintReplacedMethods) printReplacements(cls, cats);
1189
1190 BOOL isMeta = isMetaClass(cls);
1191 method_list_t **mlists = _malloc_internal(cats->count * sizeof(*mlists));
1192
1193 // Count backwards through cats to get newest categories first
1194 int mcount = 0;
1195 int i = cats->count;
1196 BOOL fromBundle = NO;
1197 while (i--) {
1198 method_list_t *mlist = cat_method_list(cats->list[i].cat, isMeta);
1199 if (mlist) {
1200 mlists[mcount++] = mlist;
1201 fromBundle |= cats->list[i].fromBundle;
1202 }
1203 }
1204
1205 attachMethodLists(cls, mlists, mcount, fromBundle, outVtablesAffected);
1206
1207 _free_internal(mlists);
1208
1209 }
1210
1211
1212 static chained_property_list *
1213 buildPropertyList(const struct objc_property_list *plist, category_list *cats, BOOL isMeta)
1214 {
1215 // Do NOT use cat->cls! It may have been remapped.
1216 chained_property_list *newlist;
1217 uint32_t count = 0;
1218 uint32_t p, c;
1219
1220 // Count properties in all lists.
1221 if (plist) count = plist->count;
1222 if (cats) {
1223 for (c = 0; c < cats->count; c++) {
1224 category_t *cat = cats->list[c].cat;
1225 /*
1226 if (isMeta && cat->classProperties) {
1227 count += cat->classProperties->count;
1228 }
1229 else*/
1230 if (!isMeta && cat->instanceProperties) {
1231 count += cat->instanceProperties->count;
1232 }
1233 }
1234 }
1235
1236 if (count == 0) return NULL;
1237
1238 // Allocate new list.
1239 newlist = _malloc_internal(sizeof(*newlist) + count * sizeof(struct objc_property));
1240 newlist->count = 0;
1241 newlist->next = NULL;
1242
1243 // Copy properties; newest categories first, then ordinary properties
1244 if (cats) {
1245 c = cats->count;
1246 while (c--) {
1247 struct objc_property_list *cplist;
1248 category_t *cat = cats->list[c].cat;
1249 /*
1250 if (isMeta) {
1251 cplist = cat->classProperties;
1252 } else */
1253 {
1254 cplist = cat->instanceProperties;
1255 }
1256 if (cplist) {
1257 for (p = 0; p < cplist->count; p++) {
1258 newlist->list[newlist->count++] =
1259 *property_list_nth(cplist, p);
1260 }
1261 }
1262 }
1263 }
1264 if (plist) {
1265 for (p = 0; p < plist->count; p++) {
1266 newlist->list[newlist->count++] = *property_list_nth(plist, p);
1267 }
1268 }
1269
1270 assert(newlist->count == count);
1271
1272 return newlist;
1273 }
1274
1275
1276 static protocol_list_t **
1277 buildProtocolList(category_list *cats, struct protocol_list_t *base,
1278 struct protocol_list_t **protos)
1279 {
1280 // Do NOT use cat->cls! It may have been remapped.
1281 struct protocol_list_t **p, **newp;
1282 struct protocol_list_t **newprotos;
1283 int count = 0;
1284 int i;
1285
1286 // count protocol list in base
1287 if (base) count++;
1288
1289 // count protocol lists in cats
1290 if (cats) for (i = 0; i < cats->count; i++) {
1291 category_t *cat = cats->list[i].cat;
1292 if (cat->protocols) count++;
1293 }
1294
1295 // no base or category protocols? return existing protocols unchanged
1296 if (count == 0) return protos;
1297
1298 // count protocol lists in protos
1299 for (p = protos; p && *p; p++) {
1300 count++;
1301 }
1302
1303 if (count == 0) return NULL;
1304
1305 newprotos = (struct protocol_list_t **)
1306 _malloc_internal((count+1) * sizeof(struct protocol_list_t *));
1307 newp = newprotos;
1308
1309 if (base) {
1310 *newp++ = base;
1311 }
1312
1313 for (p = protos; p && *p; p++) {
1314 *newp++ = *p;
1315 }
1316
1317 if (cats) for (i = 0; i < cats->count; i++) {
1318 category_t *cat = cats->list[i].cat;
1319 if (cat->protocols) {
1320 *newp++ = cat->protocols;
1321 }
1322 }
1323
1324 *newp = NULL;
1325
1326 return newprotos;
1327 }
1328
1329
1330 /***********************************************************************
1331 * methodizeClass
1332 * Fixes up cls's method list, protocol list, and property list.
1333 * Attaches any outstanding categories.
1334 * Builds vtable.
1335 * Locking: runtimeLock must be held by the caller
1336 **********************************************************************/
1337 static void methodizeClass(struct class_t *cls)
1338 {
1339 category_list *cats;
1340 BOOL isMeta;
1341
1342 rwlock_assert_writing(&runtimeLock);
1343
1344 isMeta = isMetaClass(cls);
1345
1346 // Methodizing for the first time
1347 if (PrintConnecting) {
1348 _objc_inform("CLASS: methodizing class '%s' %s",
1349 getName(cls), isMeta ? "(meta)" : "");
1350 }
1351
1352 // Build method and protocol and property lists.
1353 // Include methods and protocols and properties from categories, if any
1354 // Do NOT use cat->cls! It may have been remapped.
1355
1356 attachMethodLists(cls, (method_list_t **)&cls->data->ro->baseMethods, 1,
1357 isBundleClass(cls), NULL);
1358
1359 cats = unattachedCategoriesForClass(cls);
1360 attachCategoryMethods(cls, cats, NULL);
1361
1362 if (cats || cls->data->ro->baseProperties) {
1363 cls->data->properties =
1364 buildPropertyList(cls->data->ro->baseProperties, cats, isMeta);
1365 }
1366
1367 if (cats || cls->data->ro->baseProtocols) {
1368 cls->data->protocols =
1369 buildProtocolList(cats, cls->data->ro->baseProtocols, NULL);
1370 }
1371
1372 if (PrintConnecting) {
1373 uint32_t i;
1374 if (cats) {
1375 for (i = 0; i < cats->count; i++) {
1376 _objc_inform("CLASS: attached category %c%s(%s)",
1377 isMeta ? '+' : '-',
1378 getName(cls), cats->list[i].cat->name);
1379 }
1380 }
1381 }
1382
1383 if (cats) _free_internal(cats);
1384
1385 // No vtable until +initialize completes
1386 assert(cls->vtable == _objc_empty_vtable);
1387 }
1388
1389
1390 /***********************************************************************
1391 * remethodizeClass
1392 * Attach outstanding categories to an existing class.
1393 * Fixes up cls's method list, protocol list, and property list.
1394 * Updates method caches and vtables for cls and its subclasses.
1395 * Locking: runtimeLock must be held by the caller
1396 **********************************************************************/
1397 static void remethodizeClass(struct class_t *cls)
1398 {
1399 category_list *cats;
1400 BOOL isMeta;
1401
1402 rwlock_assert_writing(&runtimeLock);
1403
1404 isMeta = isMetaClass(cls);
1405
1406 // Re-methodizing: check for more categories
1407 if ((cats = unattachedCategoriesForClass(cls))) {
1408 chained_property_list *newproperties;
1409 struct protocol_list_t **newprotos;
1410 BOOL vtableAffected = NO;
1411
1412 if (PrintConnecting) {
1413 _objc_inform("CLASS: attaching categories to class '%s' %s",
1414 getName(cls), isMeta ? "(meta)" : "");
1415 }
1416
1417 // Update methods, properties, protocols
1418
1419 attachCategoryMethods(cls, cats, &vtableAffected);
1420
1421 newproperties = buildPropertyList(NULL, cats, isMeta);
1422 if (newproperties) {
1423 newproperties->next = cls->data->properties;
1424 cls->data->properties = newproperties;
1425 }
1426
1427 newprotos = buildProtocolList(cats, NULL, cls->data->protocols);
1428 if (cls->data->protocols && cls->data->protocols != newprotos) {
1429 _free_internal(cls->data->protocols);
1430 }
1431 cls->data->protocols = newprotos;
1432
1433 _free_internal(cats);
1434
1435 // Update method caches and vtables
1436 flushCaches(cls);
1437 if (vtableAffected) flushVtables(cls);
1438 }
1439 }
1440
1441
1442 /***********************************************************************
1443 * changeInfo
1444 * Atomically sets and clears some bits in cls's info field.
1445 * set and clear must not overlap.
1446 **********************************************************************/
1447 static void changeInfo(class_t *cls, unsigned int set, unsigned int clear)
1448 {
1449 uint32_t oldf, newf;
1450
1451 assert(isFuture(cls) || isRealized(cls));
1452
1453 do {
1454 oldf = cls->data->flags;
1455 newf = (oldf | set) & ~clear;
1456 } while (!OSAtomicCompareAndSwap32Barrier(oldf, newf, (volatile int32_t *)&cls->data->flags));
1457 }
1458
1459
1460 /***********************************************************************
1461 * namedClasses
1462 * Returns the classname => class map of all non-meta classes.
1463 * Locking: runtimeLock must be read- or write-locked by the caller
1464 **********************************************************************/
1465
1466 NXMapTable *gdb_objc_realized_classes; // exported for debuggers in objc-gdb.h
1467
1468 static NXMapTable *namedClasses(void)
1469 {
1470 rwlock_assert_locked(&runtimeLock);
1471
1472 INIT_ONCE_PTR(gdb_objc_realized_classes,
1473 NXCreateMapTableFromZone(NXStrValueMapPrototype, 1024,
1474 _objc_internal_zone()),
1475 NXFreeMapTable(v) );
1476
1477 return gdb_objc_realized_classes;
1478 }
1479
1480
1481 /***********************************************************************
1482 * addNamedClass
1483 * Adds name => cls to the named non-meta class map.
1484 * Warns about duplicate class names and keeps the old mapping.
1485 * Locking: runtimeLock must be held by the caller
1486 **********************************************************************/
1487 static void addNamedClass(class_t *cls, const char *name)
1488 {
1489 rwlock_assert_writing(&runtimeLock);
1490 class_t *old;
1491 if ((old = NXMapGet(namedClasses(), name))) {
1492 inform_duplicate(name, (Class)old, (Class)cls);
1493 } else {
1494 NXMapInsert(namedClasses(), name, cls);
1495 }
1496 assert(!(cls->data->flags & RO_META));
1497
1498 // wrong: constructed classes are already realized when they get here
1499 // assert(!isRealized(cls));
1500 }
1501
1502
1503 /***********************************************************************
1504 * removeNamedClass
1505 * Removes cls from the name => cls map.
1506 * Locking: runtimeLock must be held by the caller
1507 **********************************************************************/
1508 static void removeNamedClass(class_t *cls, const char *name)
1509 {
1510 rwlock_assert_writing(&runtimeLock);
1511 assert(!(cls->data->flags & RO_META));
1512 if (cls == NXMapGet(namedClasses(), name)) {
1513 NXMapRemove(namedClasses(), name);
1514 } else {
1515 // cls has a name collision with another class - don't remove the other
1516 }
1517 }
1518
1519
1520 /***********************************************************************
1521 * realizedClasses
1522 * Returns the class list for realized non-meta classes.
1523 * Locking: runtimeLock must be read- or write-locked by the caller
1524 **********************************************************************/
1525 static NXHashTable *realizedClasses(void)
1526 {
1527 static NXHashTable *class_hash = NULL;
1528
1529 rwlock_assert_locked(&runtimeLock);
1530
1531 INIT_ONCE_PTR(class_hash,
1532 NXCreateHashTableFromZone(NXPtrPrototype, 1024, NULL,
1533 _objc_internal_zone()),
1534 NXFreeHashTable(v));
1535
1536 return class_hash;
1537 }
1538
1539
1540 /***********************************************************************
1541 * realizedMetaclasses
1542 * Returns the class list for realized metaclasses.
1543 * Locking: runtimeLock must be read- or write-locked by the caller
1544 **********************************************************************/
1545 static NXHashTable *realizedMetaclasses(void)
1546 {
1547 static NXHashTable *class_hash = NULL;
1548
1549 rwlock_assert_locked(&runtimeLock);
1550
1551 INIT_ONCE_PTR(class_hash,
1552 NXCreateHashTableFromZone(NXPtrPrototype, 1024, NULL,
1553 _objc_internal_zone()),
1554 NXFreeHashTable(v));
1555
1556 return class_hash;
1557 }
1558
1559
1560 /***********************************************************************
1561 * addRealizedClass
1562 * Adds cls to the realized non-meta class hash.
1563 * Locking: runtimeLock must be held by the caller
1564 **********************************************************************/
1565 static void addRealizedClass(class_t *cls)
1566 {
1567 rwlock_assert_writing(&runtimeLock);
1568 void *old;
1569 old = NXHashInsert(realizedClasses(), cls);
1570 objc_addRegisteredClass((Class)cls);
1571 assert(!isMetaClass(cls));
1572 assert(!old);
1573 }
1574
1575
1576 /***********************************************************************
1577 * removeRealizedClass
1578 * Removes cls from the realized non-meta class hash.
1579 * Locking: runtimeLock must be held by the caller
1580 **********************************************************************/
1581 static void removeRealizedClass(class_t *cls)
1582 {
1583 rwlock_assert_writing(&runtimeLock);
1584 if (isRealized(cls)) {
1585 assert(!isMetaClass(cls));
1586 NXHashRemove(realizedClasses(), cls);
1587 objc_removeRegisteredClass((Class)cls);
1588 }
1589 }
1590
1591
1592 /***********************************************************************
1593 * addRealizedMetaclass
1594 * Adds cls to the realized metaclass hash.
1595 * Locking: runtimeLock must be held by the caller
1596 **********************************************************************/
1597 static void addRealizedMetaclass(class_t *cls)
1598 {
1599 rwlock_assert_writing(&runtimeLock);
1600 void *old;
1601 old = NXHashInsert(realizedMetaclasses(), cls);
1602 assert(isMetaClass(cls));
1603 assert(!old);
1604 }
1605
1606
1607 /***********************************************************************
1608 * removeRealizedMetaclass
1609 * Removes cls from the realized metaclass hash.
1610 * Locking: runtimeLock must be held by the caller
1611 **********************************************************************/
1612 static void removeRealizedMetaclass(class_t *cls)
1613 {
1614 rwlock_assert_writing(&runtimeLock);
1615 if (isRealized(cls)) {
1616 assert(isMetaClass(cls));
1617 NXHashRemove(realizedMetaclasses(), cls);
1618 }
1619 }
1620
1621
1622 /***********************************************************************
1623 * uninitializedClasses
1624 * Returns the metaclass => class map for un-+initialized classes
1625 * Replaces the 32-bit cls = objc_getName(metacls) during +initialize.
1626 * Locking: runtimeLock must be read- or write-locked by the caller
1627 **********************************************************************/
1628 static NXMapTable *uninitializedClasses(void)
1629 {
1630 static NXMapTable *class_map = NULL;
1631
1632 rwlock_assert_locked(&runtimeLock);
1633
1634 INIT_ONCE_PTR(class_map,
1635 NXCreateMapTableFromZone(NXPtrValueMapPrototype, 1024,
1636 _objc_internal_zone()),
1637 NXFreeMapTable(v) );
1638
1639 return class_map;
1640 }
1641
1642
1643 /***********************************************************************
1644 * addUninitializedClass
1645 * Adds metacls => cls to the un-+initialized class map
1646 * Locking: runtimeLock must be held by the caller
1647 **********************************************************************/
1648 static void addUninitializedClass(class_t *cls, class_t *metacls)
1649 {
1650 rwlock_assert_writing(&runtimeLock);
1651 void *old;
1652 old = NXMapInsert(uninitializedClasses(), metacls, cls);
1653 assert(isRealized(metacls) ? isMetaClass(metacls) : metacls->data->flags & RO_META);
1654 assert(! (isRealized(cls) ? isMetaClass(cls) : cls->data->flags & RO_META));
1655 assert(!old);
1656 }
1657
1658
1659 static void removeUninitializedClass(class_t *cls)
1660 {
1661 rwlock_assert_writing(&runtimeLock);
1662 NXMapRemove(uninitializedClasses(), cls->isa);
1663 }
1664
1665
1666 /***********************************************************************
1667 * getNonMetaClass
1668 * Return the ordinary class for this class or metaclass.
1669 * Used by +initialize.
1670 * Locking: runtimeLock must be read- or write-locked by the caller
1671 **********************************************************************/
1672 static class_t *getNonMetaClass(class_t *cls)
1673 {
1674 rwlock_assert_locked(&runtimeLock);
1675 if (isMetaClass(cls)) {
1676 cls = NXMapGet(uninitializedClasses(), cls);
1677 }
1678 return cls;
1679 }
1680
1681
1682 /***********************************************************************
1683 * _class_getNonMetaClass
1684 * Return the ordinary class for this class or metaclass.
1685 * Used by +initialize.
1686 * Locking: acquires runtimeLock
1687 **********************************************************************/
1688 __private_extern__ Class _class_getNonMetaClass(Class cls_gen)
1689 {
1690 class_t *cls = newcls(cls_gen);
1691 rwlock_write(&runtimeLock);
1692 cls = getNonMetaClass(cls);
1693 realizeClass(cls);
1694 rwlock_unlock_write(&runtimeLock);
1695
1696 return (Class)cls;
1697 }
1698
1699
1700
1701 /***********************************************************************
1702 * futureClasses
1703 * Returns the classname => future class map for unrealized future classes.
1704 * Locking: runtimeLock must be held by the caller
1705 **********************************************************************/
1706 static NXMapTable *futureClasses(void)
1707 {
1708 rwlock_assert_writing(&runtimeLock);
1709
1710 static NXMapTable *future_class_map = NULL;
1711
1712 if (future_class_map) return future_class_map;
1713
1714 // future_class_map is big enough to hold CF's classes and a few others
1715 future_class_map = NXCreateMapTableFromZone(NXStrValueMapPrototype, 32,
1716 _objc_internal_zone());
1717
1718 return future_class_map;
1719 }
1720
1721
1722 /***********************************************************************
1723 * addFutureClass
1724 * Installs cls as the class structure to use for the named class if it appears.
1725 * Locking: runtimeLock must be held by the caller
1726 **********************************************************************/
1727 static void addFutureClass(const char *name, class_t *cls)
1728 {
1729 void *old;
1730
1731 rwlock_assert_writing(&runtimeLock);
1732
1733 if (PrintFuture) {
1734 _objc_inform("FUTURE: reserving %p for %s", cls, name);
1735 }
1736
1737 cls->data = _calloc_internal(sizeof(*cls->data), 1);
1738 cls->data->flags = RO_FUTURE;
1739
1740 old = NXMapKeyCopyingInsert(futureClasses(), name, cls);
1741 assert(!old);
1742 }
1743
1744
1745 /***********************************************************************
1746 * removeFutureClass
1747 * Removes the named class from the unrealized future class list,
1748 * because it has been realized.
1749 * Locking: runtimeLock must be held by the caller
1750 **********************************************************************/
1751 static void removeFutureClass(const char *name)
1752 {
1753 rwlock_assert_writing(&runtimeLock);
1754
1755 NXMapKeyFreeingRemove(futureClasses(), name);
1756 }
1757
1758
1759 /***********************************************************************
1760 * remappedClasses
1761 * Returns the oldClass => newClass map for realized future classes.
1762 * Returns the oldClass => NULL map for ignored weak-linked classes.
1763 * Locking: runtimeLock must be read- or write-locked by the caller
1764 **********************************************************************/
1765 static NXMapTable *remappedClasses(BOOL create)
1766 {
1767 static NXMapTable *remapped_class_map = NULL;
1768
1769 rwlock_assert_locked(&runtimeLock);
1770
1771 if (remapped_class_map) return remapped_class_map;
1772 if (!create) return NULL;
1773
1774 // remapped_class_map is big enough to hold CF's classes and a few others
1775 INIT_ONCE_PTR(remapped_class_map,
1776 NXCreateMapTableFromZone(NXPtrValueMapPrototype, 32,
1777 _objc_internal_zone()),
1778 NXFreeMapTable(v));
1779
1780 return remapped_class_map;
1781 }
1782
1783
1784 /***********************************************************************
1785 * noClassesRemapped
1786 * Returns YES if no classes have been remapped
1787 * Locking: runtimeLock must be read- or write-locked by the caller
1788 **********************************************************************/
1789 static BOOL noClassesRemapped(void)
1790 {
1791 rwlock_assert_locked(&runtimeLock);
1792
1793 BOOL result = (remappedClasses(NO) == NULL);
1794 return result;
1795 }
1796
1797
1798 /***********************************************************************
1799 * addRemappedClass
1800 * newcls is a realized future class, replacing oldcls.
1801 * OR newcls is NULL, replacing ignored weak-linked class oldcls.
1802 * Locking: runtimeLock must be write-locked by the caller
1803 **********************************************************************/
1804 static void addRemappedClass(class_t *oldcls, class_t *newcls)
1805 {
1806 rwlock_assert_writing(&runtimeLock);
1807
1808 if (PrintFuture) {
1809 _objc_inform("FUTURE: using %p instead of %p for %s",
1810 oldcls, newcls, getName(newcls));
1811 }
1812
1813 void *old;
1814 old = NXMapInsert(remappedClasses(YES), oldcls, newcls);
1815 assert(!old);
1816 }
1817
1818
1819 /***********************************************************************
1820 * remapClass
1821 * Returns the live class pointer for cls, which may be pointing to
1822 * a class struct that has been reallocated.
1823 * Returns NULL if cls is ignored because of weak linking.
1824 * Locking: runtimeLock must be read- or write-locked by the caller
1825 **********************************************************************/
1826 static class_t *remapClass(class_t *cls)
1827 {
1828 rwlock_assert_locked(&runtimeLock);
1829
1830 class_t *c2;
1831
1832 if (!cls) return NULL;
1833
1834 if (NXMapMember(remappedClasses(YES), cls, (void**)&c2) == NX_MAPNOTAKEY) {
1835 return cls;
1836 } else {
1837 return c2;
1838 }
1839 }
1840
1841
1842 /***********************************************************************
1843 * remapClassRef
1844 * Fix up a class ref, in case the class referenced has been reallocated
1845 * or is an ignored weak-linked class.
1846 * Locking: runtimeLock must be read- or write-locked by the caller
1847 **********************************************************************/
1848 static void remapClassRef(class_t **clsref)
1849 {
1850 rwlock_assert_locked(&runtimeLock);
1851
1852 class_t *newcls = remapClass(*clsref);
1853 if (*clsref != newcls) *clsref = newcls;
1854 }
1855
1856
1857 /***********************************************************************
1858 * addSubclass
1859 * Adds subcls as a subclass of supercls.
1860 * Locking: runtimeLock must be held by the caller.
1861 **********************************************************************/
1862 static void addSubclass(class_t *supercls, class_t *subcls)
1863 {
1864 rwlock_assert_writing(&runtimeLock);
1865
1866 if (supercls && subcls) {
1867 assert(isRealized(supercls));
1868 assert(isRealized(subcls));
1869 subcls->data->nextSiblingClass = supercls->data->firstSubclass;
1870 supercls->data->firstSubclass = subcls;
1871 }
1872 }
1873
1874
1875 /***********************************************************************
1876 * removeSubclass
1877 * Removes subcls as a subclass of supercls.
1878 * Locking: runtimeLock must be held by the caller.
1879 **********************************************************************/
1880 static void removeSubclass(class_t *supercls, class_t *subcls)
1881 {
1882 rwlock_assert_writing(&runtimeLock);
1883 assert(getSuperclass(subcls) == supercls);
1884
1885 class_t **cp;
1886 for (cp = &supercls->data->firstSubclass;
1887 *cp && *cp != subcls;
1888 cp = &(*cp)->data->nextSiblingClass)
1889 ;
1890 assert(*cp == subcls);
1891 *cp = subcls->data->nextSiblingClass;
1892 }
1893
1894
1895
1896 /***********************************************************************
1897 * protocols
1898 * Returns the protocol name => protocol map for protocols.
1899 * Locking: runtimeLock must be read- or write-locked by the caller
1900 **********************************************************************/
1901 static NXMapTable *protocols(void)
1902 {
1903 static NXMapTable *protocol_map = NULL;
1904
1905 rwlock_assert_locked(&runtimeLock);
1906
1907 INIT_ONCE_PTR(protocol_map,
1908 NXCreateMapTableFromZone(NXStrValueMapPrototype, 16,
1909 _objc_internal_zone()),
1910 NXFreeMapTable(v) );
1911
1912 return protocol_map;
1913 }
1914
1915
1916 /***********************************************************************
1917 * remapProtocol
1918 * Returns the live protocol pointer for proto, which may be pointing to
1919 * a protocol struct that has been reallocated.
1920 * Locking: runtimeLock must be read- or write-locked by the caller
1921 **********************************************************************/
1922 static protocol_t *remapProtocol(protocol_ref_t proto)
1923 {
1924 rwlock_assert_locked(&runtimeLock);
1925
1926 protocol_t *newproto = NXMapGet(protocols(), ((protocol_t *)proto)->name);
1927 return newproto ? newproto : (protocol_t *)proto;
1928 }
1929
1930
1931 /***********************************************************************
1932 * remapProtocolRef
1933 * Fix up a protocol ref, in case the protocol referenced has been reallocated.
1934 * Locking: runtimeLock must be read- or write-locked by the caller
1935 **********************************************************************/
1936 static void remapProtocolRef(protocol_t **protoref)
1937 {
1938 rwlock_assert_locked(&runtimeLock);
1939
1940 protocol_t *newproto = remapProtocol((protocol_ref_t)*protoref);
1941 if (*protoref != newproto) *protoref = newproto;
1942 }
1943
1944
1945 /***********************************************************************
1946 * moveIvars
1947 * Slides a class's ivars to accommodate the given superclass size.
1948 * Also slides ivar and weak GC layouts if provided.
1949 * Ivars are NOT compacted to compensate for a superclass that shrank.
1950 * Locking: runtimeLock must be held by the caller.
1951 **********************************************************************/
1952 static void moveIvars(class_ro_t *ro, uint32_t superSize,
1953 layout_bitmap *ivarBitmap, layout_bitmap *weakBitmap)
1954 {
1955 rwlock_assert_writing(&runtimeLock);
1956
1957 uint32_t diff;
1958 uint32_t i;
1959
1960 assert(superSize > ro->instanceStart);
1961 diff = superSize - ro->instanceStart;
1962
1963 if (ro->ivars) {
1964 // Find maximum alignment in this class's ivars
1965 uint32_t maxAlignment = 1;
1966 for (i = 0; i < ro->ivars->count; i++) {
1967 ivar_t *ivar = ivar_list_nth(ro->ivars, i);
1968 if (!ivar->offset) continue; // anonymous bitfield
1969
1970 uint32_t alignment = ivar_alignment(ivar);
1971 if (alignment > maxAlignment) maxAlignment = alignment;
1972 }
1973
1974 // Compute a slide value that preserves that alignment
1975 uint32_t alignMask = maxAlignment - 1;
1976 if (diff & alignMask) diff = (diff + alignMask) & ~alignMask;
1977
1978 // Slide all of this class's ivars en masse
1979 for (i = 0; i < ro->ivars->count; i++) {
1980 ivar_t *ivar = ivar_list_nth(ro->ivars, i);
1981 if (!ivar->offset) continue; // anonymous bitfield
1982
1983 uint32_t oldOffset = (uint32_t)*ivar->offset;
1984 uint32_t newOffset = oldOffset + diff;
1985 *ivar->offset = newOffset;
1986
1987 if (PrintIvars) {
1988 _objc_inform("IVARS: offset %u -> %u for %s (size %u, align %u)",
1989 oldOffset, newOffset, ivar->name,
1990 ivar->size, ivar_alignment(ivar));
1991 }
1992 }
1993
1994 // Slide GC layouts
1995 uint32_t oldOffset = ro->instanceStart;
1996 uint32_t newOffset = ro->instanceStart + diff;
1997
1998 if (ivarBitmap) {
1999 layout_bitmap_slide(ivarBitmap,
2000 oldOffset >> WORD_SHIFT,
2001 newOffset >> WORD_SHIFT);
2002 }
2003 if (weakBitmap) {
2004 layout_bitmap_slide(weakBitmap,
2005 oldOffset >> WORD_SHIFT,
2006 newOffset >> WORD_SHIFT);
2007 }
2008 }
2009
2010 *(uint32_t *)&ro->instanceStart += diff;
2011 *(uint32_t *)&ro->instanceSize += diff;
2012
2013 if (!ro->ivars) {
2014 // No ivars slid, but superclass changed size.
2015 // Expand bitmap in preparation for layout_bitmap_splat().
2016 if (ivarBitmap) layout_bitmap_grow(ivarBitmap, ro->instanceSize >> WORD_SHIFT);
2017 if (weakBitmap) layout_bitmap_grow(weakBitmap, ro->instanceSize >> WORD_SHIFT);
2018 }
2019 }
2020
2021
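/*
 * Editor's note (worked example; illustrative numbers only, not from any
 * real class): suppose a subclass was compiled when its superclass occupied
 * 8 bytes (ro->instanceStart == 8) and the superclass now occupies 24 bytes
 * (superSize == 24). Then diff = 24 - 8 = 16. With a maximum ivar alignment
 * of 8, alignMask = 7 and (16 & 7) == 0, so no rounding is needed; an ivar
 * previously at offset 8 slides to 24 and one at 16 slides to 32. Finally
 * instanceStart becomes 24 and instanceSize grows by 16. Had diff not been
 * a multiple of the maximum alignment, it would first be rounded up:
 * diff = (diff + alignMask) & ~alignMask.
 */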
2022 /***********************************************************************
2023 * getIvar
2024 * Look up an ivar by name.
2025 * Locking: runtimeLock must be read- or write-locked by the caller.
2026 **********************************************************************/
2027 static ivar_t *getIvar(class_t *cls, const char *name)
2028 {
2029 rwlock_assert_locked(&runtimeLock);
2030
2031 const ivar_list_t *ivars;
2032 assert(isRealized(cls));
2033 if ((ivars = cls->data->ro->ivars)) {
2034 uint32_t i;
2035 for (i = 0; i < ivars->count; i++) {
2036 struct ivar_t *ivar = ivar_list_nth(ivars, i);
2037 if (!ivar->offset) continue; // anonymous bitfield
2038
2039 // ivar->name may be NULL for anonymous bitfields etc.
2040 if (ivar->name && 0 == strcmp(name, ivar->name)) {
2041 return ivar;
2042 }
2043 }
2044 }
2045
2046 return NULL;
2047 }
2048
2049
2050 /***********************************************************************
2051 * realizeClass
2052 * Performs first-time initialization on class cls,
2053 * including allocating its read-write data.
2054 * Returns the real class structure for the class.
2055 * Locking: runtimeLock must be write-locked by the caller
2056 **********************************************************************/
2057 static class_t *realizeClass(class_t *cls)
2058 {
2059 rwlock_assert_writing(&runtimeLock);
2060
2061 const class_ro_t *ro;
2062 class_rw_t *rw;
2063 class_t *supercls;
2064 class_t *metacls;
2065 BOOL isMeta;
2066
2067 if (!cls) return NULL;
2068 if (isRealized(cls)) return cls;
2069 assert(cls == remapClass(cls));
2070
2071 ro = (const class_ro_t *)cls->data;
2072 if (ro->flags & RO_FUTURE) {
2073 // This was a future class. rw data is already allocated.
2074 rw = cls->data;
2075 ro = cls->data->ro;
2076 changeInfo(cls, RW_REALIZED, RW_FUTURE);
2077 } else {
2078 // Normal class. Allocate writeable class data.
2079 rw = _calloc_internal(sizeof(class_rw_t), 1);
2080 rw->ro = ro;
2081 rw->flags = RW_REALIZED;
2082 cls->data = rw;
2083 }
2084
2085 isMeta = (ro->flags & RO_META) ? YES : NO;
2086
2087 rw->version = isMeta ? 7 : 0; // old runtime went up to 6
2088
2089 if (PrintConnecting) {
2090 _objc_inform("CLASS: realizing class '%s' %s %p %p",
2091 ro->name, isMeta ? "(meta)" : "", cls, ro);
2092 }
2093
2094 // Realize superclass and metaclass, if they aren't already.
2095 // This needs to be done after RW_REALIZED is set above, for root classes.
2096 supercls = realizeClass(remapClass(cls->superclass));
2097 metacls = realizeClass(remapClass(cls->isa));
2098
2099 // Check for remapped superclass
2100 // fixme doesn't handle remapped metaclass
2101 assert(metacls == cls->isa);
2102 if (supercls != cls->superclass) {
2103 cls->superclass = supercls;
2104 }
2105
2106 /* debug: print them all
2107 if (ro->ivars) {
2108 uint32_t i;
2109 for (i = 0; i < ro->ivars->count; i++) {
2110 ivar_t *ivar = ivar_list_nth(ro->ivars, i);
2111 if (!ivar->offset) continue; // anonymous bitfield
2112
2113 _objc_inform("IVARS: %s.%s (offset %u, size %u, align %u)",
2114 ro->name, ivar->name,
2115 *ivar->offset, ivar->size, ivar_alignment(ivar));
2116 }
2117 }
2118 */
2119
2120
2121 if (supercls) {
2122 // Non-fragile ivars - reconcile this class with its superclass
2123 layout_bitmap ivarBitmap;
2124 layout_bitmap weakBitmap;
2125 BOOL layoutsChanged = NO;
2126
2127 if (UseGC) {
2128 // fixme can optimize for "class has no new ivars", etc
2129 // WARNING: gcc c++ sets instanceStart/Size=0 for classes with
2130 // no local ivars, but does provide a layout bitmap.
2131 // Handle that case specially so layout_bitmap_create doesn't die
2132 // The other ivar sliding code below still works fine, and
2133 // the final result is a good class.
2134 if (ro->instanceStart == 0 && ro->instanceSize == 0) {
2135 // We can't use ro->ivarLayout because we don't know
2136 // how long it is. Force a new layout to be created.
2137 if (PrintIvars) {
2138 _objc_inform("IVARS: instanceStart/Size==0 for class %s; "
2139 "disregarding ivar layout", ro->name);
2140 }
2141 ivarBitmap =
2142 layout_bitmap_create(NULL,
2143 supercls->data->ro->instanceSize,
2144 supercls->data->ro->instanceSize, NO);
2145 weakBitmap =
2146 layout_bitmap_create(NULL,
2147 supercls->data->ro->instanceSize,
2148 supercls->data->ro->instanceSize, YES);
2149 layoutsChanged = YES;
2150 } else {
2151 ivarBitmap =
2152 layout_bitmap_create(ro->ivarLayout,
2153 ro->instanceSize,
2154 ro->instanceSize, NO);
2155 weakBitmap =
2156 layout_bitmap_create(ro->weakIvarLayout,
2157 ro->instanceSize,
2158 ro->instanceSize, YES);
2159 }
2160 }
2161
2162 if (ro->instanceStart < supercls->data->ro->instanceSize) {
2163 // Superclass has changed size. This class's ivars must move.
2164 // Also slide layout bits in parallel.
2165 // This code is incapable of compacting the subclass to
2166 // compensate for a superclass that shrank, so don't do that.
2167 if (PrintIvars) {
2168 _objc_inform("IVARS: sliding ivars for class %s "
2169 "(superclass was %u bytes, now %u)",
2170 ro->name, ro->instanceStart,
2171 supercls->data->ro->instanceSize);
2172 }
2173 class_ro_t *ro_w = make_ro_writeable(rw);
2174 ro = rw->ro;
2175 moveIvars(ro_w, supercls->data->ro->instanceSize,
2176 UseGC ? &ivarBitmap : NULL, UseGC ? &weakBitmap : NULL);
2177 gdb_objc_class_changed((Class)cls, OBJC_CLASS_IVARS_CHANGED, ro->name);
2178 layoutsChanged = YES;
2179 }
2180
2181 if (UseGC) {
2182 // Check superclass's layout against this class's layout.
2183 // This needs to be done even if the superclass is not bigger.
2184 layout_bitmap superBitmap =
2185 layout_bitmap_create(supercls->data->ro->ivarLayout,
2186 supercls->data->ro->instanceSize,
2187 supercls->data->ro->instanceSize, NO);
2188 layoutsChanged |= layout_bitmap_splat(ivarBitmap, superBitmap,
2189 ro->instanceStart);
2190 layout_bitmap_free(superBitmap);
2191
2192 superBitmap =
2193 layout_bitmap_create(supercls->data->ro->weakIvarLayout,
2194 supercls->data->ro->instanceSize,
2195 supercls->data->ro->instanceSize, YES);
2196 layoutsChanged |= layout_bitmap_splat(weakBitmap, superBitmap,
2197 ro->instanceStart);
2198 layout_bitmap_free(superBitmap);
2199
2200 if (layoutsChanged) {
2201 // Rebuild layout strings.
2202 if (PrintIvars) {
2203 _objc_inform("IVARS: gc layout changed for class %s",
2204 ro->name);
2205 }
2206 class_ro_t *ro_w = make_ro_writeable(rw);
2207 ro = rw->ro;
2208 ro_w->ivarLayout = layout_string_create(ivarBitmap);
2209 ro_w->weakIvarLayout = layout_string_create(weakBitmap);
2210 }
2211
2212 layout_bitmap_free(ivarBitmap);
2213 layout_bitmap_free(weakBitmap);
2214 }
2215 }
2216
2217 // Connect this class to its superclass's subclass lists
2218 if (supercls) {
2219 addSubclass(supercls, cls);
2220 }
2221
2222 // Attach categories
2223 methodizeClass(cls);
2224
2225 if (!isMeta) {
2226 addRealizedClass(cls);
2227 } else {
2228 addRealizedMetaclass(cls);
2229 }
2230
2231 return cls;
2232 }
2233
2234
2235 /***********************************************************************
2236 * getClass
2237 * Looks up a class by name. The class MIGHT NOT be realized.
2238 * Locking: runtimeLock must be read- or write-locked by the caller.
2239 **********************************************************************/
2240 static class_t *getClass(const char *name)
2241 {
2242 rwlock_assert_locked(&runtimeLock);
2243
2244 return (class_t *)NXMapGet(namedClasses(), name);
2245 }
2246
2247
2248 /***********************************************************************
2249 * missingWeakSuperclass
2250 * Return YES if some superclass of cls was weak-linked and is missing.
2251 **********************************************************************/
2252 static BOOL
2253 missingWeakSuperclass(class_t *cls)
2254 {
2255 assert(!isRealized(cls));
2256
2257 if (!cls->superclass) {
2258 // superclass NULL. This is normal for root classes only.
2259 return (!(cls->data->flags & RO_ROOT));
2260 } else {
2261 // superclass not NULL. Check if a higher superclass is missing.
2262 class_t *supercls = remapClass(cls->superclass);
2263 if (!supercls) return YES;
2264 if (isRealized(supercls)) return NO;
2265 return missingWeakSuperclass(supercls);
2266 }
2267 }
2268
2269
2270 /***********************************************************************
2271 * realizeAllClassesInImage
2272 * Non-lazily realizes all unrealized classes in the given image.
2273 * Locking: runtimeLock must be held by the caller.
2274 **********************************************************************/
2275 static void realizeAllClassesInImage(header_info *hi)
2276 {
2277 rwlock_assert_writing(&runtimeLock);
2278
2279 size_t count, i;
2280 class_t **classlist;
2281
2282 if (hi->allClassesRealized) return;
2283
2284 classlist = _getObjc2ClassList(hi, &count);
2285
2286 for (i = 0; i < count; i++) {
2287 realizeClass(remapClass(classlist[i]));
2288 }
2289
2290 hi->allClassesRealized = YES;
2291 }
2292
2293
2294 /***********************************************************************
2295 * realizeAllClasses
2296 * Non-lazily realizes all unrealized classes in all known images.
2297 * Locking: runtimeLock must be held by the caller.
2298 **********************************************************************/
2299 static void realizeAllClasses(void)
2300 {
2301 rwlock_assert_writing(&runtimeLock);
2302
2303 header_info *hi;
2304 for (hi = FirstHeader; hi; hi = hi->next) {
2305 realizeAllClassesInImage(hi);
2306 }
2307 }
2308
2309
2310 /***********************************************************************
2311 * _objc_allocateFutureClass
2312 * Allocate an unresolved future class for the given class name.
2313 * Returns any existing allocation if one was already made.
2314 * Assumes the named class doesn't exist yet.
2315 * Locking: acquires runtimeLock
2316 **********************************************************************/
2317 __private_extern__ Class _objc_allocateFutureClass(const char *name)
2318 {
2319 rwlock_write(&runtimeLock);
2320
2321 struct class_t *cls;
2322 NXMapTable *future_class_map = futureClasses();
2323
2324 if ((cls = NXMapGet(future_class_map, name))) {
2325 // Already have a future class for this name.
2326 rwlock_unlock_write(&runtimeLock);
2327 return (Class)cls;
2328 }
2329
2330 cls = (class_t *)_calloc_class(sizeof(*cls));
2331 addFutureClass(name, cls);
2332
2333 rwlock_unlock_write(&runtimeLock);
2334 return (Class)cls;
2335 }
2336
2337
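/*
 * Editor's note: an illustrative sketch of how the future-class machinery
 * above is consumed. The class name is hypothetical, and
 * _objc_allocateFutureClass is private to libobjc; this only makes the
 * flow concrete and is not a supported usage pattern.
 */
#if 0
extern Class _objc_allocateFutureClass(const char *name);

static Class reservedStringClass;

static void reserve_before_load(void)
{
    // Reserve a class_t for a class that has not been mapped yet.
    // The returned pointer may be baked into static data; when the real
    // class is read in, _read_images() copies its metadata into this same
    // allocation and records the old->new mapping used by remapClass().
    reservedStringClass = _objc_allocateFutureClass("MyStringClass");
}
#endif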
2338 /***********************************************************************
2339 * objc_setFutureClass: currently a no-op; NSCFString is handled specially elsewhere.
2340 **********************************************************************/
2341 void objc_setFutureClass(Class cls, const char *name)
2342 {
2343 // fixme hack do nothing - NSCFString handled specially elsewhere
2344 }
2345
2346
2347 #define FOREACH_REALIZED_SUBCLASS(_c, _cls, code) \
2348 do { \
2349 rwlock_assert_writing(&runtimeLock); \
2350 class_t *_top = _cls; \
2351 class_t *_c = _top; \
2352 if (_c) { \
2353 while (1) { \
2354 code \
2355 if (_c->data->firstSubclass) { \
2356 _c = _c->data->firstSubclass; \
2357 } else { \
2358 while (!_c->data->nextSiblingClass && _c != _top) { \
2359 _c = getSuperclass(_c); \
2360 } \
2361 if (_c == _top) break; \
2362 _c = _c->data->nextSiblingClass; \
2363 } \
2364 } \
2365 } else { \
2366 /* nil means all realized classes */ \
2367 NXHashTable *_classes = realizedClasses(); \
2368 NXHashTable *_metaclasses = realizedMetaclasses(); \
2369 NXHashState _state; \
2370 _state = NXInitHashState(_classes); \
2371 while (NXNextHashState(_classes, &_state, (void**)&_c)) \
2372 { \
2373 code \
2374 } \
2375 _state = NXInitHashState(_metaclasses); \
2376 while (NXNextHashState(_metaclasses, &_state, (void**)&_c)) \
2377 { \
2378 code \
2379 } \
2380 } \
2381 } while (0)
2382
2383
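/*
 * Editor's note: for readers, the non-recursive walk in
 * FOREACH_REALIZED_SUBCLASS above is roughly equivalent (for a non-Nil
 * class) to this hypothetical recursive helper; the macro additionally
 * handles the Nil case by visiting every realized class and metaclass.
 */
#if 0
static void foreach_realized_subclass(class_t *cls, void (*fn)(class_t *))
{
    // Visit cls itself, then each subtree rooted at a direct subclass.
    fn(cls);
    class_t *sub;
    for (sub = cls->data->firstSubclass; sub; sub = sub->data->nextSiblingClass) {
        foreach_realized_subclass(sub, fn);
    }
}
#endif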
2384 /***********************************************************************
2385 * flushVtables
2386 * Rebuilds vtables for cls and its realized subclasses.
2387 * If cls is Nil, all realized classes and metaclasses are touched.
2388 * Locking: runtimeLock must be held by the caller.
2389 **********************************************************************/
2390 static void flushVtables(class_t *cls)
2391 {
2392 rwlock_assert_writing(&runtimeLock);
2393
2394 if (PrintVtables && !cls) {
2395 _objc_inform("VTABLES: ### EXPENSIVE ### global vtable flush!");
2396 }
2397
2398 FOREACH_REALIZED_SUBCLASS(c, cls, {
2399 updateVtable(c, NO);
2400 });
2401 }
2402
2403
2404 /***********************************************************************
2405 * flushCaches
2406 * Flushes caches for cls and its realized subclasses.
2407 * Does not update vtables.
2408 * If cls is Nil, all realized classes and metaclasses are touched.
2409 * Locking: runtimeLock must be held by the caller.
2410 **********************************************************************/
2411 static void flushCaches(class_t *cls)
2412 {
2413 rwlock_assert_writing(&runtimeLock);
2414
2415 FOREACH_REALIZED_SUBCLASS(c, cls, {
2416 flush_cache((Class)c);
2417 });
2418 }
2419
2420
2421 /***********************************************************************
2422 * flush_caches
2423 * Flushes caches and rebuilds vtables for cls, its subclasses,
2424 * and optionally its metaclass.
2425 * Locking: acquires runtimeLock
2426 **********************************************************************/
2427 __private_extern__ void flush_caches(Class cls_gen, BOOL flush_meta)
2428 {
2429 class_t *cls = newcls(cls_gen);
2430 rwlock_write(&runtimeLock);
2431 // fixme optimize vtable flushing? (only needed for vtable'd selectors)
2432 flushCaches(cls);
2433 flushVtables(cls);
2434 // don't flush root class's metaclass twice (it's a subclass of the root)
2435 if (flush_meta && getSuperclass(cls)) {
2436 flushCaches(cls->isa);
2437 flushVtables(cls->isa);
2438 }
2439 rwlock_unlock_write(&runtimeLock);
2440 }
2441
2442
2443 /***********************************************************************
2444 * map_images
2445 * Process the given images which are being mapped in by dyld.
2446 * Calls ABI-agnostic code after taking ABI-specific locks.
2447 *
2448 * Locking: write-locks runtimeLock
2449 **********************************************************************/
2450 __private_extern__ const char *
2451 map_images(enum dyld_image_states state, uint32_t infoCount,
2452 const struct dyld_image_info infoList[])
2453 {
2454 const char *err;
2455
2456 rwlock_write(&runtimeLock);
2457 err = map_images_nolock(state, infoCount, infoList);
2458 rwlock_unlock_write(&runtimeLock);
2459 return err;
2460 }
2461
2462
2463 /***********************************************************************
2464 * load_images
2465 * Process +load in the given images which are being mapped in by dyld.
2466 * Calls ABI-agnostic code after taking ABI-specific locks.
2467 *
2468 * Locking: write-locks runtimeLock and loadMethodLock
2469 **********************************************************************/
2470 __private_extern__ const char *
2471 load_images(enum dyld_image_states state, uint32_t infoCount,
2472 const struct dyld_image_info infoList[])
2473 {
2474 BOOL found;
2475
2476 recursive_mutex_lock(&loadMethodLock);
2477
2478 // Discover load methods
2479 rwlock_write(&runtimeLock);
2480 found = load_images_nolock(state, infoCount, infoList);
2481 rwlock_unlock_write(&runtimeLock);
2482
2483 // Call +load methods (without runtimeLock - re-entrant)
2484 if (found) {
2485 call_load_methods();
2486 }
2487
2488 recursive_mutex_unlock(&loadMethodLock);
2489
2490 return NULL;
2491 }
2492
2493
2494 /***********************************************************************
2495 * unmap_image
2496 * Process the given image which is about to be unmapped by dyld.
2497 * mh is mach_header instead of headerType because that's what
2498 * dyld_priv.h says even for 64-bit.
2499 *
2500 * Locking: write-locks runtimeLock and loadMethodLock
2501 **********************************************************************/
2502 __private_extern__ void
2503 unmap_image(const struct mach_header *mh, intptr_t vmaddr_slide)
2504 {
2505 recursive_mutex_lock(&loadMethodLock);
2506 rwlock_write(&runtimeLock);
2507
2508 unmap_image_nolock(mh, vmaddr_slide);
2509
2510 rwlock_unlock_write(&runtimeLock);
2511 recursive_mutex_unlock(&loadMethodLock);
2512 }
2513
2514
2515 /***********************************************************************
2516 * _read_images
2517 * Perform initial processing of the headers in the linked
2518 * list beginning with headerList.
2519 *
2520 * Called by: map_images_nolock
2521 *
2522 * Locking: runtimeLock acquired by map_images
2523 **********************************************************************/
2524 __private_extern__ void _read_images(header_info **hList, uint32_t hCount)
2525 {
2526 header_info *hi;
2527 uint32_t hIndex;
2528 size_t count;
2529 size_t i;
2530 class_t **resolvedFutureClasses = NULL;
2531 size_t resolvedFutureClassCount = 0;
2532 static BOOL doneOnce;
2533
2534 rwlock_assert_writing(&runtimeLock);
2535
2536 if (!doneOnce) {
2537 initVtables();
2538 doneOnce = YES;
2539 }
2540
2541 #define EACH_HEADER \
2542 hIndex = 0; \
2543 hIndex < hCount && (hi = hList[hIndex]); \
2544 hIndex++
2545
2546 // Complain about images that contain old-ABI data
2547 // fixme new-ABI compiler still emits some bits into __OBJC segment
2548 for (EACH_HEADER) {
2549 size_t count;
2550 if (_getObjcSelectorRefs(hi, &count) || _getObjcModules(hi, &count)) {
2551 _objc_inform("found old-ABI metadata in image %s !",
2552 hi->os.dl_info.dli_fname);
2553 }
2554 }
2555
2556 // fixme hack
2557 static BOOL hackedNSCFString = NO;
2558 if (!hackedNSCFString) {
2559 // Insert future class __CFConstantStringClassReference == NSCFString
2560 void *dlh = dlopen("/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation", RTLD_LAZY | RTLD_NOLOAD | RTLD_FIRST);
2561 if (dlh) {
2562 void *addr = dlsym(dlh, "__CFConstantStringClassReference");
2563 if (addr) {
2564 addFutureClass("NSCFString", (class_t *)addr);
2565 hackedNSCFString = YES;
2566 }
2567 dlclose(dlh);
2568 }
2569 }
2570
2571 // Discover classes. Fix up unresolved future classes. Mark bundle classes.
2572 NXMapTable *future_class_map = futureClasses();
2573 for (EACH_HEADER) {
2574 class_t **classlist = _getObjc2ClassList(hi, &count);
2575 for (i = 0; i < count; i++) {
2576 const char *name = getName(classlist[i]);
2577
2578 if (missingWeakSuperclass(classlist[i])) {
2579 // No superclass (probably weak-linked).
2580 // Disavow any knowledge of this subclass.
2581 if (PrintConnecting) {
2582 _objc_inform("CLASS: IGNORING class '%s' with "
2583 "missing weak-linked superclass", name);
2584 }
2585 addRemappedClass(classlist[i], NULL);
2586 classlist[i]->superclass = NULL;
2587 classlist[i] = NULL;
2588 continue;
2589 }
2590
2591 if (NXCountMapTable(future_class_map) > 0) {
2592 class_t *newCls = NXMapGet(future_class_map, name);
2593 if (newCls) {
2594 // Copy class_t to future class's struct.
2595 // Preserve future's rw data block.
2596 class_rw_t *rw = newCls->data;
2597 memcpy(newCls, classlist[i], sizeof(class_t));
2598 rw->ro = (class_ro_t *)newCls->data;
2599 newCls->data = rw;
2600
2601 removeFutureClass(name);
2602 addRemappedClass(classlist[i], newCls);
2603 classlist[i] = newCls;
2604 // Non-lazily realize the class below.
2605 resolvedFutureClasses = (class_t **)
2606 _realloc_internal(resolvedFutureClasses,
2607 (resolvedFutureClassCount+1)
2608 * sizeof(class_t *));
2609 resolvedFutureClasses[resolvedFutureClassCount++] = newCls;
2610 }
2611 }
2612 addNamedClass(classlist[i], name);
2613 addUninitializedClass(classlist[i], classlist[i]->isa);
2614 if (hi->mhdr->filetype == MH_BUNDLE) {
2615 classlist[i]->data->flags |= RO_FROM_BUNDLE;
2616 classlist[i]->isa->data->flags |= RO_FROM_BUNDLE;
2617 }
2618 }
2619 }
2620
2621 // Fix up remapped classes
2622 // classlist is up to date, but classrefs may not be
2623
2624 if (!noClassesRemapped()) {
2625 for (EACH_HEADER) {
2626 class_t **classrefs = _getObjc2ClassRefs(hi, &count);
2627 for (i = 0; i < count; i++) {
2628 remapClassRef(&classrefs[i]);
2629 }
2630 // fixme why doesn't test future1 catch the absence of this?
2631 classrefs = _getObjc2SuperRefs(hi, &count);
2632 for (i = 0; i < count; i++) {
2633 remapClassRef(&classrefs[i]);
2634 }
2635 }
2636 }
2637
2638
2639 // Fix up @selector references
2640 sel_lock();
2641 for (EACH_HEADER) {
2642 if (PrintPreopt) {
2643 if (sel_preoptimizationValid(hi)) {
2644 _objc_inform("PREOPTIMIZATION: honoring preoptimized selectors in %s",
2645 _nameForHeader(hi->mhdr));
2646 }
2647 else if (_objcHeaderOptimizedByDyld(hi)) {
2648 _objc_inform("PREOPTIMIZATION: IGNORING preoptimized selectors in %s",
2649 _nameForHeader(hi->mhdr));
2650 }
2651 }
2652
2653 if (sel_preoptimizationValid(hi)) continue;
2654
2655 SEL *sels = _getObjc2SelectorRefs(hi, &count);
2656 BOOL isBundle = hi->mhdr->filetype == MH_BUNDLE;
2657 for (i = 0; i < count; i++) {
2658 sels[i] = sel_registerNameNoLock((const char *)sels[i], isBundle);
2659 }
2660 }
2661 sel_unlock();
2662
2663 // Discover protocols. Fix up protocol refs.
2664 NXMapTable *protocol_map = protocols();
2665 for (EACH_HEADER) {
2666 extern struct class_t OBJC_CLASS_$_Protocol;
2667 Class cls = (Class)&OBJC_CLASS_$_Protocol;
2668 assert(cls);
2669 protocol_t **protocols = _getObjc2ProtocolList(hi, &count);
2670 // fixme duplicate protocol from bundle
2671 for (i = 0; i < count; i++) {
2672 if (!NXMapGet(protocol_map, protocols[i]->name)) {
2673 protocols[i]->isa = cls;
2674 NXMapKeyCopyingInsert(protocol_map,
2675 protocols[i]->name, protocols[i]);
2676 if (PrintProtocols) {
2677 _objc_inform("PROTOCOLS: protocol at %p is %s",
2678 protocols[i], protocols[i]->name);
2679 }
2680 } else {
2681 if (PrintProtocols) {
2682 _objc_inform("PROTOCOLS: protocol at %p is %s (duplicate)",
2683 protocols[i], protocols[i]->name);
2684 }
2685 }
2686 }
2687 }
2688 for (EACH_HEADER) {
2689 protocol_t **protocols;
2690 protocols = _getObjc2ProtocolRefs(hi, &count);
2691 for (i = 0; i < count; i++) {
2692 remapProtocolRef(&protocols[i]);
2693 }
2694 }
2695
2696 // Realize non-lazy classes (for +load methods and static instances)
2697 for (EACH_HEADER) {
2698 class_t **classlist =
2699 _getObjc2NonlazyClassList(hi, &count);
2700 for (i = 0; i < count; i++) {
2701 realizeClass(remapClass(classlist[i]));
2702 }
2703 }
2704
2705 // Realize newly-resolved future classes, in case CF manipulates them
2706 if (resolvedFutureClasses) {
2707 for (i = 0; i < resolvedFutureClassCount; i++) {
2708 realizeClass(resolvedFutureClasses[i]);
2709 }
2710 _free_internal(resolvedFutureClasses);
2711 }
2712
2713 // Discover categories.
2714 for (EACH_HEADER) {
2715 category_t **catlist =
2716 _getObjc2CategoryList(hi, &count);
2717 for (i = 0; i < count; i++) {
2718 category_t *cat = catlist[i];
2719 // Do NOT use cat->cls! It may have been remapped.
2720 class_t *cls = remapClass(cat->cls);
2721
2722 if (!cls) {
2723 // Category's target class is missing (probably weak-linked).
2724 // Disavow any knowledge of this category.
2725 catlist[i] = NULL;
2726 if (PrintConnecting) {
2727 _objc_inform("CLASS: IGNORING category \?\?\?(%s) %p with "
2728 "missing weak-linked target class",
2729 cat->name, cat);
2730 }
2731 continue;
2732 }
2733
2734 // Process this category.
2735 // First, register the category with its target class.
2736 // Then, rebuild the class's method lists (etc) if
2737 // the class is realized.
2738 BOOL classExists = NO;
2739 if (cat->instanceMethods || cat->protocols
2740 || cat->instanceProperties)
2741 {
2742 addUnattachedCategoryForClass(cat, cls, hi);
2743 if (isRealized(cls)) {
2744 remethodizeClass(cls);
2745 classExists = YES;
2746 }
2747 if (PrintConnecting) {
2748 _objc_inform("CLASS: found category -%s(%s) %s",
2749 getName(cls), cat->name,
2750 classExists ? "on existing class" : "");
2751 }
2752 }
2753
2754 if (cat->classMethods || cat->protocols
2755 /* || cat->classProperties */)
2756 {
2757 addUnattachedCategoryForClass(cat, cls->isa, hi);
2758 if (isRealized(cls->isa)) {
2759 remethodizeClass(cls->isa);
2760 }
2761 if (PrintConnecting) {
2762 _objc_inform("CLASS: found category +%s(%s)",
2763 getName(cls), cat->name);
2764 }
2765 }
2766 }
2767 }
2768
2769 // Category discovery MUST BE LAST to avoid potential races
2770 // when other threads call the new category code before
2771 // this thread finishes its fixups.
2772
2773 // +load handled by prepare_load_methods()
2774
2775 #undef EACH_HEADER
2776 }
2777
2778
2779 /***********************************************************************
2780 * prepare_load_methods
2781 * Schedule +load for classes in this image, any un-+load-ed
2782 * superclasses in other images, and any categories in this image.
2783 **********************************************************************/
2784 // Recursively schedule +load for cls and any un-+load-ed superclasses.
2785 // cls must already be connected.
2786 static void schedule_class_load(class_t *cls)
2787 {
2788 if (!cls) return;
2789 assert(isRealized(cls)); // _read_images should realize
2790
2791 if (cls->data->flags & RW_LOADED) return;
2792
2793 // Ensure superclass-first ordering
2794 schedule_class_load(getSuperclass(cls));
2795
2796 add_class_to_loadable_list((Class)cls);
2797 changeInfo(cls, RW_LOADED, 0);
2798 }
2799
2800 __private_extern__ void prepare_load_methods(header_info *hi)
2801 {
2802 size_t count, i;
2803
2804 rwlock_assert_writing(&runtimeLock);
2805
2806 class_t **classlist =
2807 _getObjc2NonlazyClassList(hi, &count);
2808 for (i = 0; i < count; i++) {
2809 schedule_class_load(remapClass(classlist[i]));
2810 }
2811
2812 category_t **categorylist = _getObjc2NonlazyCategoryList(hi, &count);
2813 for (i = 0; i < count; i++) {
2814 category_t *cat = categorylist[i];
2815 // Do NOT use cat->cls! It may have been remapped.
2816 class_t *cls = remapClass(cat->cls);
2817 if (!cls) continue; // category for ignored weak-linked class
2818 realizeClass(cls);
2819 assert(isRealized(cls->isa));
2820 add_category_to_loadable_list((Category)cat);
2821 }
2822 }
2823
2824
2825 /***********************************************************************
2826 * _unload_image
2827 * Only handles MH_BUNDLE for now.
2828 * Locking: runtimeLock write-lock and loadMethodLock acquired by unmap_image
2829 **********************************************************************/
2830 __private_extern__ void _unload_image(header_info *hi)
2831 {
2832 size_t count, i;
2833
2834 recursive_mutex_assert_locked(&loadMethodLock);
2835 rwlock_assert_writing(&runtimeLock);
2836
2837 // Unload unattached categories and categories waiting for +load.
2838
2839 category_t **catlist = _getObjc2CategoryList(hi, &count);
2840 for (i = 0; i < count; i++) {
2841 category_t *cat = catlist[i];
2842 if (!cat) continue; // category for ignored weak-linked class
2843 class_t *cls = remapClass(cat->cls);
2844 assert(cls); // shouldn't have live category for dead class
2845
2846 // fixme for MH_DYLIB cat's class may have been unloaded already
2847
2848 // unattached list
2849 removeUnattachedCategoryForClass(cat, cls);
2850
2851 // +load queue
2852 remove_category_from_loadable_list((Category)cat);
2853 }
2854
2855 // Unload classes.
2856
2857 class_t **classlist = _getObjc2ClassList(hi, &count);
2858 for (i = 0; i < count; i++) {
2859 class_t *cls = classlist[i];
2860 // fixme remapped classes?
2861 // fixme ignored weak-linked classes
2862 if (cls) {
2863 remove_class_from_loadable_list((Class)cls);
2864 unload_class(cls->isa, YES);
2865 unload_class(cls, NO);
2866 }
2867 }
2868
2869 // Clean up protocols.
2870 #warning fixme protocol unload
2871
2872 // fixme DebugUnload
2873 }
2874
2875
2876 /***********************************************************************
2877 * method_getDescription
2878 * Returns a pointer to this method's objc_method_description.
2879 * Locking: none
2880 **********************************************************************/
2881 struct objc_method_description *
2882 method_getDescription(Method m)
2883 {
2884 if (!m) return NULL;
2885 return (struct objc_method_description *)newmethod(m);
2886 }
2887
2888
2889 /***********************************************************************
2890 * method_getImplementation
2891 * Returns this method's IMP.
2892 * Locking: none
2893 **********************************************************************/
2894 static IMP
2895 _method_getImplementation(method_t *m)
2896 {
2897 if (!m) return NULL;
2898 return m->imp;
2899 }
2900
2901 IMP
2902 method_getImplementation(Method m)
2903 {
2904 return _method_getImplementation(newmethod(m));
2905 }
2906
2907
2908 /***********************************************************************
2909 * method_getName
2910 * Returns this method's selector.
2911 * The method must not be NULL.
2912 * The method must already have been fixed-up.
2913 * Locking: none
2914 **********************************************************************/
2915 SEL
2916 method_getName(Method m_gen)
2917 {
2918 struct method_t *m = newmethod(m_gen);
2919 if (!m) return NULL;
2920
2921 assert((SEL)m->name == sel_registerName((char *)m->name));
2922 return (SEL)m->name;
2923 }
2924
2925
2926 /***********************************************************************
2927 * method_getTypeEncoding
2928 * Returns this method's old-style type encoding string.
2929 * The method must not be NULL.
2930 * Locking: none
2931 **********************************************************************/
2932 const char *
2933 method_getTypeEncoding(Method m)
2934 {
2935 if (!m) return NULL;
2936 return newmethod(m)->types;
2937 }
2938
2939
2940 /***********************************************************************
2941 * method_setImplementation
2942 * Sets this method's implementation to imp.
2943 * The previous implementation is returned.
2944 **********************************************************************/
2945 static IMP
2946 _method_setImplementation(class_t *cls, method_t *m, IMP imp)
2947 {
2948 rwlock_assert_writing(&runtimeLock);
2949
2950 if (!m) return NULL;
2951 if (!imp) return NULL;
2952
2953 if (m->name == (SEL)kIgnore) {
2954 // Ignored methods stay ignored
2955 return m->imp;
2956 }
2957
2958 IMP old = _method_getImplementation(m);
2959 m->imp = imp;
2960
2961 // No cache flushing needed - cache contains Methods not IMPs.
2962
2963 if (vtable_containsSelector(newmethod(m)->name)) {
2964 // Will be slow if cls is NULL (i.e. unknown)
2965 // fixme build list of classes whose Methods are known externally?
2966 flushVtables(cls);
2967 }
2968
2969 // fixme update monomorphism if necessary
2970
2971 return old;
2972 }
2973
2974 IMP
2975 method_setImplementation(Method m, IMP imp)
2976 {
2977 // Don't know the class - will be slow if vtables are affected
2978 // fixme build list of classes whose Methods are known externally?
2979 IMP result;
2980 rwlock_write(&runtimeLock);
2981 result = _method_setImplementation(Nil, newmethod(m), imp);
2982 rwlock_unlock_write(&runtimeLock);
2983 return result;
2984 }
2985
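/*
 * Editor's note: illustrative client-side sketch (not part of objc4).
 * "Widget" and -describe are hypothetical names; the point is that
 * method_setImplementation returns the previous IMP so callers can chain
 * to the original implementation.
 */
#if 0
#include <objc/runtime.h>
#include <stdio.h>

static id replacement_describe(id self, SEL _cmd)
{
    printf("patched -describe on %s\n", class_getName(object_getClass(self)));
    return self;
}

static IMP patch_describe(void)
{
    Class cls = objc_getClass("Widget");
    Method m = class_getInstanceMethod(cls, sel_registerName("describe"));
    return m ? method_setImplementation(m, (IMP)replacement_describe) : NULL;
}
#endif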
2986
2987 void method_exchangeImplementations(Method m1_gen, Method m2_gen)
2988 {
2989 method_t *m1 = newmethod(m1_gen);
2990 method_t *m2 = newmethod(m2_gen);
2991 if (!m1 || !m2) return;
2992
2993 rwlock_write(&runtimeLock);
2994
2995 if (m1->name == (SEL)kIgnore || m2->name == (SEL)kIgnore) {
2996 // Ignored methods stay ignored. Now they're both ignored.
2997 m1->imp = (IMP)&_objc_ignored_method;
2998 m2->imp = (IMP)&_objc_ignored_method;
2999 rwlock_unlock_write(&runtimeLock);
3000 return;
3001 }
3002
3003 IMP m1_imp = m1->imp;
3004 m1->imp = m2->imp;
3005 m2->imp = m1_imp;
3006
3007 if (vtable_containsSelector(m1->name) ||
3008 vtable_containsSelector(m2->name))
3009 {
3010 // Don't know the class - will be slow if vtables are affected
3011 // fixme build list of classes whose Methods are known externally?
3012 flushVtables(NULL);
3013 }
3014
3015 // fixme update monomorphism if necessary
3016
3017 rwlock_unlock_write(&runtimeLock);
3018 }
3019
3020
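/*
 * Editor's note: illustrative swizzling sketch (hypothetical class and
 * selectors, not part of objc4). After the exchange, invoking -draw runs
 * the old -drawDebug code and vice versa; as the code above shows, vtables
 * are flushed automatically when either selector is vtable'd.
 */
#if 0
#include <objc/runtime.h>

static void swap_draw_methods(void)
{
    Class cls = objc_getClass("Widget");
    Method m1 = class_getInstanceMethod(cls, sel_registerName("draw"));
    Method m2 = class_getInstanceMethod(cls, sel_registerName("drawDebug"));
    if (m1 && m2) {
        method_exchangeImplementations(m1, m2);
    }
}
#endif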
3021 /***********************************************************************
3022 * ivar_getOffset
3023 * Returns the offset of this ivar within instances of its class.
3024 * Locking: none
3025 **********************************************************************/
3026 ptrdiff_t
3027 ivar_getOffset(Ivar ivar)
3028 {
3029 if (!ivar) return 0;
3030 return *newivar(ivar)->offset;
3031 }
3032
3033
3034 /***********************************************************************
3035 * ivar_getName
3036 * Returns this ivar's name.
3037 * Locking: none
3038 **********************************************************************/
3039 const char *
3040 ivar_getName(Ivar ivar)
3041 {
3042 if (!ivar) return NULL;
3043 return newivar(ivar)->name;
3044 }
3045
3046
3047 /***********************************************************************
3048 * ivar_getTypeEncoding
3049 * Returns this ivar's type encoding string.
3050 * Locking: none
3051 **********************************************************************/
3052 const char *
3053 ivar_getTypeEncoding(Ivar ivar)
3054 {
3055 if (!ivar) return NULL;
3056 return newivar(ivar)->type;
3057 }
3058
3059
3060 /***********************************************************************
3061 * _protocol_getMethod_nolock
3062 * Locking: runtimeLock must be write-locked by the caller
3063 **********************************************************************/
3064 static Method
3065 _protocol_getMethod_nolock(protocol_t *proto, SEL sel,
3066 BOOL isRequiredMethod, BOOL isInstanceMethod)
3067 {
3068 rwlock_assert_writing(&runtimeLock);
3069
3070 uint32_t i;
3071 if (!proto || !sel) return NULL;
3072
3073 method_list_t **mlistp = NULL;
3074
3075 if (isRequiredMethod) {
3076 if (isInstanceMethod) {
3077 mlistp = &proto->instanceMethods;
3078 } else {
3079 mlistp = &proto->classMethods;
3080 }
3081 } else {
3082 if (isInstanceMethod) {
3083 mlistp = &proto->optionalInstanceMethods;
3084 } else {
3085 mlistp = &proto->optionalClassMethods;
3086 }
3087 }
3088
3089 if (*mlistp) {
3090 method_list_t *mlist = *mlistp;
3091 if (!isMethodListFixedUp(mlist)) {
3092 mlist = _memdup_internal(mlist, method_list_size(mlist));
3093 fixupMethodList(mlist, YES/*always copy for simplicity*/);
3094 *mlistp = mlist;
3095 }
3096 for (i = 0; i < mlist->count; i++) {
3097 method_t *m = method_list_nth(mlist, i);
3098 if (sel == m->name) return (Method)m;
3099 }
3100 }
3101
3102 if (proto->protocols) {
3103 Method m;
3104 for (i = 0; i < proto->protocols->count; i++) {
3105 protocol_t *realProto = remapProtocol(proto->protocols->list[i]);
3106 m = _protocol_getMethod_nolock(realProto, sel,
3107 isRequiredMethod, isInstanceMethod);
3108 if (m) return m;
3109 }
3110 }
3111
3112 return NULL;
3113 }
3114
3115
3116 /***********************************************************************
3117 * _protocol_getMethod
3118 * Looks up a method in a protocol and its incorporated protocols.
3119 * Locking: write-locks runtimeLock
3120 **********************************************************************/
3121 __private_extern__ Method
3122 _protocol_getMethod(Protocol *p, SEL sel, BOOL isRequiredMethod, BOOL isInstanceMethod)
3123 {
3124 rwlock_write(&runtimeLock);
3125 Method result = _protocol_getMethod_nolock(newprotocol(p), sel,
3126 isRequiredMethod,
3127 isInstanceMethod);
3128 rwlock_unlock_write(&runtimeLock);
3129 return result;
3130 }
3131
3132
3133 /***********************************************************************
3134 * protocol_getName
3135 * Returns the name of the given protocol.
3136 * Locking: runtimeLock must not be held by the caller
3137 **********************************************************************/
3138 const char *
3139 protocol_getName(Protocol *proto)
3140 {
3141 return newprotocol(proto)->name;
3142 }
3143
3144
3145 /***********************************************************************
3146 * protocol_getMethodDescription
3147 * Returns the description of a named required/optional instance or class method.
3148 * Locking: runtimeLock must not be held by the caller
3149 **********************************************************************/
3150 struct objc_method_description
3151 protocol_getMethodDescription(Protocol *p, SEL aSel,
3152 BOOL isRequiredMethod, BOOL isInstanceMethod)
3153 {
3154 Method m =
3155 _protocol_getMethod(p, aSel, isRequiredMethod, isInstanceMethod);
3156 if (m) return *method_getDescription(m);
3157 else return (struct objc_method_description){NULL, NULL};
3158 }
3159
3160
3161 /***********************************************************************
3162 * _protocol_conformsToProtocol_nolock
3163 * Returns YES if self conforms to other.
3164 * Locking: runtimeLock must be held by the caller.
3165 **********************************************************************/
3166 static BOOL _protocol_conformsToProtocol_nolock(protocol_t *self, protocol_t *other)
3167 {
3168 if (!self || !other) {
3169 return NO;
3170 }
3171
3172 if (0 == strcmp(self->name, other->name)) {
3173 return YES;
3174 }
3175
3176 if (self->protocols) {
3177 int i;
3178 for (i = 0; i < self->protocols->count; i++) {
3179 protocol_t *proto = remapProtocol(self->protocols->list[i]);
3180 if (0 == strcmp(other->name, proto->name)) {
3181 return YES;
3182 }
3183 if (_protocol_conformsToProtocol_nolock(proto, other)) {
3184 return YES;
3185 }
3186 }
3187 }
3188
3189 return NO;
3190 }
3191
3192
3193 /***********************************************************************
3194 * protocol_conformsToProtocol
3195 * Returns YES if self conforms to other.
3196 * Locking: acquires runtimeLock
3197 **********************************************************************/
3198 BOOL protocol_conformsToProtocol(Protocol *self, Protocol *other)
3199 {
3200 BOOL result;
3201 rwlock_read(&runtimeLock);
3202 result = _protocol_conformsToProtocol_nolock(newprotocol(self),
3203 newprotocol(other));
3204 rwlock_unlock_read(&runtimeLock);
3205 return result;
3206 }
3207
3208
3209 /***********************************************************************
3210 * protocol_isEqual
3211 * Return YES if two protocols are equal (i.e. conform to each other)
3212 * Locking: acquires runtimeLock
3213 **********************************************************************/
3214 BOOL protocol_isEqual(Protocol *self, Protocol *other)
3215 {
3216 if (self == other) return YES;
3217 if (!self || !other) return NO;
3218
3219 if (!protocol_conformsToProtocol(self, other)) return NO;
3220 if (!protocol_conformsToProtocol(other, self)) return NO;
3221
3222 return YES;
3223 }
3224
3225
3226 /***********************************************************************
3227 * protocol_copyMethodDescriptionList
3228 * Returns descriptions of a protocol's methods.
3229 * Locking: acquires runtimeLock
3230 **********************************************************************/
3231 struct objc_method_description *
3232 protocol_copyMethodDescriptionList(Protocol *p,
3233 BOOL isRequiredMethod,BOOL isInstanceMethod,
3234 unsigned int *outCount)
3235 {
3236 struct protocol_t *proto = newprotocol(p);
3237 struct objc_method_description *result = NULL;
3238 unsigned int count = 0;
3239
3240 if (!proto) {
3241 if (outCount) *outCount = 0;
3242 return NULL;
3243 }
3244
3245 rwlock_read(&runtimeLock);
3246
3247 method_list_t *mlist = NULL;
3248
3249 if (isRequiredMethod) {
3250 if (isInstanceMethod) {
3251 mlist = proto->instanceMethods;
3252 } else {
3253 mlist = proto->classMethods;
3254 }
3255 } else {
3256 if (isInstanceMethod) {
3257 mlist = proto->optionalInstanceMethods;
3258 } else {
3259 mlist = proto->optionalClassMethods;
3260 }
3261 }
3262
3263 if (mlist) {
3264 unsigned int i;
3265 count = mlist->count;
3266 result = calloc(count + 1, sizeof(struct objc_method_description));
3267 for (i = 0; i < count; i++) {
3268 method_t *m = method_list_nth(mlist, i);
3269 result[i].name = sel_registerName((const char *)m->name);
3270 result[i].types = (char *)m->types;
3271 }
3272 }
3273
3274 rwlock_unlock_read(&runtimeLock);
3275
3276 if (outCount) *outCount = count;
3277 return result;
3278 }
3279
3280
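/*
 * Editor's note: illustrative usage sketch (not part of objc4). Enumerates
 * a protocol's required instance methods; the protocol name passed in is
 * whatever the caller cares about.
 */
#if 0
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dump_required_instance_methods(const char *protoName)
{
    Protocol *p = objc_getProtocol(protoName);
    if (!p) return;

    unsigned int count = 0;
    struct objc_method_description *descs =
        protocol_copyMethodDescriptionList(p, YES /*required*/, YES /*instance*/, &count);
    for (unsigned int i = 0; i < count; i++) {
        printf("-%s  types=%s\n", sel_getName(descs[i].name), descs[i].types);
    }
    free(descs);    // caller owns the returned block
}
#endif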
3281 /***********************************************************************
3282 * protocol_getProperty
3283 * Looks up a named property in a protocol and its incorporated protocols.
3284 * Locking: acquires runtimeLock
3285 **********************************************************************/
3286 static Property
3287 _protocol_getProperty_nolock(protocol_t *proto, const char *name,
3288 BOOL isRequiredProperty, BOOL isInstanceProperty)
3289 {
3290 if (!isRequiredProperty || !isInstanceProperty) {
3291 // Only required instance properties are currently supported
3292 return NULL;
3293 }
3294
3295 struct objc_property_list *plist;
3296 if ((plist = proto->instanceProperties)) {
3297 uint32_t i;
3298 for (i = 0; i < plist->count; i++) {
3299 Property prop = property_list_nth(plist, i);
3300 if (0 == strcmp(name, prop->name)) {
3301 return prop;
3302 }
3303 }
3304 }
3305
3306 if (proto->protocols) {
3307 uintptr_t i;
3308 for (i = 0; i < proto->protocols->count; i++) {
3309 protocol_t *p = remapProtocol(proto->protocols->list[i]);
3310 Property prop =
3311 _protocol_getProperty_nolock(p, name,
3312 isRequiredProperty,
3313 isInstanceProperty);
3314 if (prop) return prop;
3315 }
3316 }
3317
3318 return NULL;
3319 }
3320
3321 Property protocol_getProperty(Protocol *p, const char *name,
3322 BOOL isRequiredProperty, BOOL isInstanceProperty)
3323 {
3324 Property result;
3325
3326 if (!p || !name) return NULL;
3327
3328 rwlock_read(&runtimeLock);
3329 result = _protocol_getProperty_nolock(newprotocol(p), name,
3330 isRequiredProperty,
3331 isInstanceProperty);
3332 rwlock_unlock_read(&runtimeLock);
3333
3334 return result;
3335 }
3336
3337
3338 /***********************************************************************
3339 * protocol_copyPropertyList
3340 * Returns a copy of this protocol's declared instance properties.
3341 * Locking: acquires runtimeLock
3342 **********************************************************************/
3343 Property *protocol_copyPropertyList(Protocol *proto, unsigned int *outCount)
3344 {
3345 Property *result = NULL;
3346
3347 if (!proto) {
3348 if (outCount) *outCount = 0;
3349 return NULL;
3350 }
3351
3352 rwlock_read(&runtimeLock);
3353
3354 struct objc_property_list *plist = newprotocol(proto)->instanceProperties;
3355 result = copyPropertyList(plist, outCount);
3356
3357 rwlock_unlock_read(&runtimeLock);
3358
3359 return result;
3360 }
3361
3362
3363 /***********************************************************************
3364 * protocol_copyProtocolList
3365 * Copies this protocol's incorporated protocols.
3366 * Does not copy those protocols' incorporated protocols in turn.
3367 * Locking: acquires runtimeLock
3368 **********************************************************************/
3369 Protocol **protocol_copyProtocolList(Protocol *p, unsigned int *outCount)
3370 {
3371 unsigned int count = 0;
3372 Protocol **result = NULL;
3373 protocol_t *proto = newprotocol(p);
3374
3375 if (!proto) {
3376 if (outCount) *outCount = 0;
3377 return NULL;
3378 }
3379
3380 rwlock_read(&runtimeLock);
3381
3382 if (proto->protocols) {
3383 count = (unsigned int)proto->protocols->count;
3384 }
3385 if (count > 0) {
3386 result = malloc((count+1) * sizeof(Protocol *));
3387
3388 unsigned int i;
3389 for (i = 0; i < count; i++) {
3390 result[i] = (Protocol *)remapProtocol(proto->protocols->list[i]);
3391 }
3392 result[i] = NULL;
3393 }
3394
3395 rwlock_unlock_read(&runtimeLock);
3396
3397 if (outCount) *outCount = count;
3398 return result;
3399 }
3400
3401
3402 /***********************************************************************
3403 * objc_getClassList
3404 * Returns pointers to all classes.
3405 * This requires all classes to be realized, which is regrettably non-lazy.
3406 * Locking: acquires runtimeLock
3407 **********************************************************************/
3408 int
3409 objc_getClassList(Class *buffer, int bufferLen)
3410 {
3411 rwlock_write(&runtimeLock);
3412
3413 realizeAllClasses();
3414
3415 int count;
3416 class_t *cls;
3417 NXHashState state;
3418 NXHashTable *classes = realizedClasses();
3419 int allCount = NXCountHashTable(classes);
3420
3421 if (!buffer) {
3422 rwlock_unlock_write(&runtimeLock);
3423 return allCount;
3424 }
3425
3426 count = 0;
3427 state = NXInitHashState(classes);
3428 while (count < bufferLen &&
3429 NXNextHashState(classes, &state, (void **)&cls))
3430 {
3431 buffer[count++] = (Class)cls;
3432 }
3433
3434 rwlock_unlock_write(&runtimeLock);
3435
3436 return allCount;
3437 }
3438
3439
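/*
 * Editor's note: illustrative usage sketch (not part of objc4). Shows the
 * usual two-call pattern; as the comment above warns, this forces every
 * class to be realized.
 */
#if 0
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dump_all_classes(void)
{
    int capacity = objc_getClassList(NULL, 0);          // ask for the count only
    Class *buf = (Class *)malloc(sizeof(Class) * capacity);
    int total = objc_getClassList(buf, capacity);       // returns the total count
    int n = total < capacity ? total : capacity;        // classes may have been added meanwhile
    for (int i = 0; i < n; i++) {
        printf("%s\n", class_getName(buf[i]));
    }
    free(buf);
}
#endif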
3440 /***********************************************************************
3441 * objc_copyProtocolList
3442 * Returns pointers to all protocols.
3443 * Locking: read-locks runtimeLock
3444 **********************************************************************/
3445 Protocol **
3446 objc_copyProtocolList(unsigned int *outCount)
3447 {
3448 rwlock_read(&runtimeLock);
3449
3450 int count, i;
3451 Protocol *proto;
3452 const char *name;
3453 NXMapState state;
3454 NXMapTable *protocol_map = protocols();
3455 Protocol **result;
3456
3457 count = NXCountMapTable(protocol_map);
3458 if (count == 0) {
3459 rwlock_unlock_read(&runtimeLock);
3460 if (outCount) *outCount = 0;
3461 return NULL;
3462 }
3463
3464 result = calloc(1 + count, sizeof(Protocol *));
3465
3466 i = 0;
3467 state = NXInitMapState(protocol_map);
3468 while (NXNextMapState(protocol_map, &state,
3469 (const void **)&name, (const void **)&proto))
3470 {
3471 result[i++] = proto;
3472 }
3473
3474 result[i++] = NULL;
3475 assert(i == count+1);
3476
3477 rwlock_unlock_read(&runtimeLock);
3478
3479 if (outCount) *outCount = count;
3480 return result;
3481 }
3482
3483
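/*
 * Editor's note: illustrative usage sketch (not part of objc4).
 */
#if 0
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dump_all_protocols(void)
{
    unsigned int count = 0;
    Protocol **list = objc_copyProtocolList(&count);    // NULL-terminated block
    for (unsigned int i = 0; i < count; i++) {
        printf("%s\n", protocol_getName(list[i]));
    }
    free(list);                                         // free(NULL) is harmless
}
#endif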
3484 /***********************************************************************
3485 * objc_getProtocol
3486 * Get a protocol by name, or return NULL
3487 * Locking: read-locks runtimeLock
3488 **********************************************************************/
3489 Protocol *objc_getProtocol(const char *name)
3490 {
3491 rwlock_read(&runtimeLock);
3492 Protocol *result = (Protocol *)NXMapGet(protocols(), name);
3493 rwlock_unlock_read(&runtimeLock);
3494 return result;
3495 }
3496
3497
3498 /***********************************************************************
3499 * class_copyMethodList
3500 * Returns a copy of the class's own method list (superclasses not included).
3501 * Locking: read-locks runtimeLock
3502 **********************************************************************/
3503 Method *
3504 class_copyMethodList(Class cls_gen, unsigned int *outCount)
3505 {
3506 struct class_t *cls = newcls(cls_gen);
3507 unsigned int count = 0;
3508 Method *result = NULL;
3509
3510 if (!cls) {
3511 if (outCount) *outCount = 0;
3512 return NULL;
3513 }
3514
3515 rwlock_read(&runtimeLock);
3516
3517 assert(isRealized(cls));
3518
3519 FOREACH_METHOD_LIST(mlist, cls, {
3520 count += mlist->count;
3521 });
3522
3523 if (count > 0) {
3524 unsigned int m;
3525 result = malloc((count + 1) * sizeof(Method));
3526
3527 m = 0;
3528 FOREACH_METHOD_LIST(mlist, cls, {
3529 unsigned int i;
3530 for (i = 0; i < mlist->count; i++) {
3531 Method aMethod = (Method)method_list_nth(mlist, i);
3532 if (method_getName(aMethod) == (SEL)kIgnore) {
3533 count--;
3534 continue;
3535 }
3536 result[m++] = aMethod;
3537 }
3538 });
3539 result[m] = NULL;
3540 }
3541
3542 rwlock_unlock_read(&runtimeLock);
3543
3544 if (outCount) *outCount = count;
3545 return result;
3546 }
3547
3548
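/*
 * Editor's note: illustrative usage sketch (not part of objc4). Prints the
 * selectors and type encodings of a class's own instance methods; pass the
 * metaclass (object_getClass((id)cls)) to see class methods instead.
 */
#if 0
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dump_methods(Class cls)
{
    unsigned int count = 0;
    Method *methods = class_copyMethodList(cls, &count);
    for (unsigned int i = 0; i < count; i++) {
        printf("-%s  types=%s\n",
               sel_getName(method_getName(methods[i])),
               method_getTypeEncoding(methods[i]));
    }
    free(methods);
}
#endif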
3549 /***********************************************************************
3550 * class_copyIvarList
3551 * Returns a copy of the class's own ivar list (superclasses not included).
3552 * Locking: read-locks runtimeLock
3553 **********************************************************************/
3554 Ivar *
3555 class_copyIvarList(Class cls_gen, unsigned int *outCount)
3556 {
3557 struct class_t *cls = newcls(cls_gen);
3558 const ivar_list_t *ivars;
3559 Ivar *result = NULL;
3560 unsigned int count = 0;
3561 unsigned int i;
3562
3563 if (!cls) {
3564 if (outCount) *outCount = 0;
3565 return NULL;
3566 }
3567
3568 rwlock_read(&runtimeLock);
3569
3570 assert(isRealized(cls));
3571
3572 if ((ivars = cls->data->ro->ivars) && ivars->count) {
3573 result = malloc((ivars->count+1) * sizeof(Ivar));
3574
3575 for (i = 0; i < ivars->count; i++) {
3576 ivar_t *ivar = ivar_list_nth(ivars, i);
3577 if (!ivar->offset) continue; // anonymous bitfield
3578 result[count++] = (Ivar)ivar;
3579 }
3580 result[count] = NULL;
3581 }
3582
3583 rwlock_unlock_read(&runtimeLock);
3584
3585 if (outCount) *outCount = count;
3586 return result;
3587 }
3588
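/***********************************************************************
* Usage sketch for class_copyIvarList(); per the code above, only the
* class's own ivars are returned and anonymous bitfields are skipped.
* The class name is illustrative.
*
*   unsigned int count;
*   Ivar *ivars = class_copyIvarList(objc_getClass("NSObject"), &count);
*   for (unsigned int i = 0; i < count; i++) {
*       printf("%s @ %td : %s\n", ivar_getName(ivars[i]),
*              ivar_getOffset(ivars[i]), ivar_getTypeEncoding(ivars[i]));
*   }
*   free(ivars);
**********************************************************************/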
3589
3590 /***********************************************************************
3591 * class_copyPropertyList. Returns a heap block containing the
3592 * properties declared in the class, or NULL if the class
3593 * declares no properties. Caller must free the block.
3594 * Does not copy any superclass's properties.
3595 * Locking: read-locks runtimeLock
3596 **********************************************************************/
3597 Property *
3598 class_copyPropertyList(Class cls_gen, unsigned int *outCount)
3599 {
3600 struct class_t *cls = newcls(cls_gen);
3601 chained_property_list *plist;
3602 unsigned int count = 0;
3603 Property *result = NULL;
3604
3605 if (!cls) {
3606 if (outCount) *outCount = 0;
3607 return NULL;
3608 }
3609
3610 rwlock_read(&runtimeLock);
3611
3612 assert(isRealized(cls));
3613
3614 for (plist = cls->data->properties; plist; plist = plist->next) {
3615 count += plist->count;
3616 }
3617
3618 if (count > 0) {
3619 unsigned int p;
3620 result = malloc((count + 1) * sizeof(Property));
3621
3622 p = 0;
3623 for (plist = cls->data->properties; plist; plist = plist->next) {
3624 unsigned int i;
3625 for (i = 0; i < plist->count; i++) {
3626 result[p++] = (Property)&plist->list[i];
3627 }
3628 }
3629 result[p] = NULL;
3630 }
3631
3632 rwlock_unlock_read(&runtimeLock);
3633
3634 if (outCount) *outCount = count;
3635 return result;
3636 }
3637
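/***********************************************************************
* Usage sketch for class_copyPropertyList(). Client code sees the
* public objc_property_t type rather than the internal Property
* typedef; "cls" stands for any realized class.
*
*   unsigned int count;
*   objc_property_t *props = class_copyPropertyList(cls, &count);
*   for (unsigned int i = 0; i < count; i++) {
*       printf("%s : %s\n", property_getName(props[i]),
*              property_getAttributes(props[i]));
*   }
*   free(props);
**********************************************************************/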
3638
3639 /***********************************************************************
3640 * _class_getLoadMethod
3641 * Returns the class's own +load implementation, or NULL if it has none.
3642 * Called only from add_class_to_loadable_list.
3643 * Locking: runtimeLock must be read- or write-locked by the caller.
3644 **********************************************************************/
3645 __private_extern__ IMP
3646 _class_getLoadMethod(Class cls_gen)
3647 {
3648 rwlock_assert_locked(&runtimeLock);
3649
3650 struct class_t *cls = newcls(cls_gen);
3651 const method_list_t *mlist;
3652 int i;
3653
3654 assert(isRealized(cls));
3655 assert(isRealized(cls->isa));
3656 assert(!isMetaClass(cls));
3657 assert(isMetaClass(cls->isa));
3658
3659 mlist = cls->isa->data->ro->baseMethods;
3660 if (mlist) for (i = 0; i < mlist->count; i++) {
3661 method_t *m = method_list_nth(mlist, i);
3662 if (0 == strcmp((const char *)m->name, "load")) {
3663 return m->imp;
3664 }
3665 }
3666
3667 return NULL;
3668 }
3669
3670
3671 /***********************************************************************
3672 * _category_getName
3673 * Returns a category's name.
3674 * Locking: none
3675 **********************************************************************/
3676 __private_extern__ const char *
3677 _category_getName(Category cat)
3678 {
3679 return newcategory(cat)->name;
3680 }
3681
3682
3683 /***********************************************************************
3684 * _category_getClassName
3685 * Returns a category's class's name
3686 * Called only from add_category_to_loadable_list and
3687 * remove_category_from_loadable_list.
3688 * Locking: runtimeLock must be read- or write-locked by the caller
3689 **********************************************************************/
3690 __private_extern__ const char *
3691 _category_getClassName(Category cat)
3692 {
3693 rwlock_assert_locked(&runtimeLock);
3694 // cat->cls may have been remapped
3695 return getName(remapClass(newcategory(cat)->cls));
3696 }
3697
3698
3699 /***********************************************************************
3700 * _category_getClass
3701 * Returns a category's class
3702 * Called only by call_category_loads.
3703 * Locking: read-locks runtimeLock
3704 **********************************************************************/
3705 __private_extern__ Class
3706 _category_getClass(Category cat)
3707 {
3708 rwlock_read(&runtimeLock);
3709 // cat->cls may have been remapped
3710 struct class_t *result = remapClass(newcategory(cat)->cls);
3711 assert(isRealized(result)); // ok for call_category_loads' usage
3712 rwlock_unlock_read(&runtimeLock);
3713 return (Class)result;
3714 }
3715
3716
3717 /***********************************************************************
3718 * _category_getLoadMethod
3719 * Returns the category's +load implementation, or NULL if it has none.
3720 * Called only from add_category_to_loadable_list
3721 * Locking: runtimeLock must be read- or write-locked by the caller
3722 **********************************************************************/
3723 __private_extern__ IMP
3724 _category_getLoadMethod(Category cat)
3725 {
3726 rwlock_assert_locked(&runtimeLock);
3727
3728 const method_list_t *mlist;
3729 int i;
3730
3731 mlist = newcategory(cat)->classMethods;
3732 if (mlist) for (i = 0; i < mlist->count; i++) {
3733 method_t *m = method_list_nth(mlist, i);
3734 if (0 == strcmp((const char *)m->name, "load")) {
3735 return m->imp;
3736 }
3737 }
3738
3739 return NULL;
3740 }
3741
3742
3743 /***********************************************************************
3744 * class_copyProtocolList
3745 * Returns a NULL-terminated heap block of the protocols adopted directly by the class. Caller must free the block.
3746 * Locking: read-locks runtimeLock
3747 **********************************************************************/
3748 Protocol **
3749 class_copyProtocolList(Class cls_gen, unsigned int *outCount)
3750 {
3751 struct class_t *cls = newcls(cls_gen);
3752 Protocol **r;
3753 struct protocol_list_t **p;
3754 unsigned int count = 0;
3755 unsigned int i;
3756 Protocol **result = NULL;
3757
3758 if (!cls) {
3759 if (outCount) *outCount = 0;
3760 return NULL;
3761 }
3762
3763 rwlock_read(&runtimeLock);
3764
3765 assert(isRealized(cls));
3766
3767 for (p = cls->data->protocols; p && *p; p++) {
3768 count += (uint32_t)(*p)->count;
3769 }
3770
3771 if (count) {
3772 result = malloc((count+1) * sizeof(Protocol *));
3773 r = result;
3774 for (p = cls->data->protocols; p && *p; p++) {
3775 for (i = 0; i < (*p)->count; i++) {
3776 *r++ = (Protocol *)remapProtocol((*p)->list[i]);
3777 }
3778 }
3779 *r++ = NULL;
3780 }
3781
3782 rwlock_unlock_read(&runtimeLock);
3783
3784 if (outCount) *outCount = count;
3785 return result;
3786 }
3787
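/***********************************************************************
* Usage sketch for class_copyProtocolList(). As implemented above, only
* protocols adopted directly by the class are returned; inherited
* adoptions require walking the superclass chain. "cls" is any realized
* class.
*
*   unsigned int count;
*   Protocol **protos = class_copyProtocolList(cls, &count);
*   for (unsigned int i = 0; i < count; i++) {
*       printf("%s adopts %s\n", class_getName(cls),
*              protocol_getName(protos[i]));
*   }
*   free(protos);
**********************************************************************/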
3788
3789 /***********************************************************************
3790 * _objc_copyClassNamesForImage
3791 * Returns the names of all classes present in the given image. Caller must free the block.
3792 * Locking: read-locks runtimeLock
3793 **********************************************************************/
3794 __private_extern__ const char **
3795 _objc_copyClassNamesForImage(header_info *hi, unsigned int *outCount)
3796 {
3797 size_t count, i, shift;
3798 class_t **classlist;
3799 const char **names;
3800
3801 rwlock_read(&runtimeLock);
3802
3803 classlist = _getObjc2ClassList(hi, &count);
3804 names = malloc((count+1) * sizeof(const char *));
3805
3806 shift = 0;
3807 for (i = 0; i < count; i++) {
3808 class_t *cls = remapClass(classlist[i]);
3809 if (cls) {
3810 names[i-shift] = getName(cls);  // use the remapped class
3811 } else {
3812 shift++; // ignored weak-linked class
3813 }
3814 }
3815 count -= shift;
3816 names[count] = NULL;
3817
3818 rwlock_unlock_read(&runtimeLock);
3819
3820 if (outCount) *outCount = (unsigned int)count;
3821 return names;
3822 }
3823
3824
3825 /***********************************************************************
3826 * _class_getCache
3827 * fixme
3828 * Locking: none
3829 **********************************************************************/
3830 __private_extern__ Cache
3831 _class_getCache(Class cls)
3832 {
3833 return newcls(cls)->cache;
3834 }
3835
3836
3837 /***********************************************************************
3838 * _class_getInstanceSize
3839 * fixme
3840 * Locking: none
3841 **********************************************************************/
3842 __private_extern__ size_t
3843 _class_getInstanceSize(Class cls)
3844 {
3845 if (!cls) return 0;
3846 return instanceSize(newcls(cls));
3847 }
3848
3849 static uint32_t
3850 instanceSize(struct class_t *cls)
3851 {
3852 assert(cls);
3853 assert(isRealized(cls));
3854 // fixme rdar://5244378
3855 return (uint32_t)((cls->data->ro->instanceSize + WORD_MASK) & ~WORD_MASK);
3856 }
3857
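// Example of the rounding above: on LP64 (WORD_MASK == 7)
//   (13 + 7) & ~7 == 16    and    (16 + 7) & ~7 == 16
// so reported instance sizes are always a multiple of the word size.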
3858
3859 /***********************************************************************
3860 * class_getVersion
3861 * fixme
3862 * Locking: none
3863 **********************************************************************/
3864 int
3865 class_getVersion(Class cls)
3866 {
3867 if (!cls) return 0;
3868 assert(isRealized(newcls(cls)));
3869 return newcls(cls)->data->version;
3870 }
3871
3872
3873 /***********************************************************************
3874 * _class_setCache
3875 * fixme
3876 * Locking: none
3877 **********************************************************************/
3878 __private_extern__ void
3879 _class_setCache(Class cls, Cache cache)
3880 {
3881 newcls(cls)->cache = cache;
3882 }
3883
3884
3885 /***********************************************************************
3886 * class_setVersion
3887 * fixme
3888 * Locking: none
3889 **********************************************************************/
3890 void
3891 class_setVersion(Class cls, int version)
3892 {
3893 if (!cls) return;
3894 assert(isRealized(newcls(cls)));
3895 newcls(cls)->data->version = version;
3896 }
3897
3898
3899 /***********************************************************************
3900 * _class_getName
3901 * fixme
3902 * Locking: acquires runtimeLock
3903 **********************************************************************/
3904 __private_extern__ const char *_class_getName(Class cls)
3905 {
3906 if (!cls) return "nil";
3907 // fixme hack rwlock_write(&runtimeLock);
3908 const char *name = getName(newcls(cls));
3909 // rwlock_unlock_write(&runtimeLock);
3910 return name;
3911 }
3912
3913
3914 /***********************************************************************
3915 * getName
3916 * fixme
3917 * Locking: runtimeLock must be held by the caller
3918 **********************************************************************/
3919 static const char *
3920 getName(struct class_t *cls)
3921 {
3922 // fixme hack rwlock_assert_writing(&runtimeLock);
3923 assert(cls);
3924
3925 if (isRealized(cls)) {
3926 return cls->data->ro->name;
3927 } else {
3928 return ((const struct class_ro_t *)cls->data)->name;
3929 }
3930 }
3931
3932
3933 /***********************************************************************
3934 * getMethodNoSuper_nolock
3935 * fixme
3936 * Locking: runtimeLock must be read- or write-locked by the caller
3937 **********************************************************************/
3938 static method_t *
3939 getMethodNoSuper_nolock(struct class_t *cls, SEL sel)
3940 {
3941 rwlock_assert_locked(&runtimeLock);
3942
3943 uint32_t i;
3944
3945 assert(isRealized(cls));
3946 // fixme nil cls?
3947 // fixme NULL sel?
3948
3949 FOREACH_METHOD_LIST(mlist, cls, {
3950 for (i = 0; i < mlist->count; i++) {
3951 method_t *m = method_list_nth(mlist, i);
3952 if (m->name == sel) return m;
3953 }
3954 });
3955
3956 return NULL;
3957 }
3958
3959
3960 /***********************************************************************
3961 * _class_getMethodNoSuper
3962 * fixme
3963 * Locking: read-locks runtimeLock
3964 **********************************************************************/
3965 __private_extern__ Method
3966 _class_getMethodNoSuper(Class cls, SEL sel)
3967 {
3968 rwlock_read(&runtimeLock);
3969 Method result = (Method)getMethodNoSuper_nolock(newcls(cls), sel);
3970 rwlock_unlock_read(&runtimeLock);
3971 return result;
3972 }
3973
3974 /***********************************************************************
3975 * _class_getMethodNoSuper
3976 * For use inside lockForMethodLookup() only.
3977 * Locking: read-locks runtimeLock
3978 **********************************************************************/
3979 __private_extern__ Method
3980 _class_getMethodNoSuper_nolock(Class cls, SEL sel)
3981 {
3982 return (Method)getMethodNoSuper_nolock(newcls(cls), sel);
3983 }
3984
3985
3986 /***********************************************************************
3987 * getMethod_nolock
3988 * fixme
3989 * Locking: runtimeLock must be read- or write-locked by the caller
3990 **********************************************************************/
3991 static method_t *
3992 getMethod_nolock(class_t *cls, SEL sel)
3993 {
3994 method_t *m = NULL;
3995
3996 rwlock_assert_locked(&runtimeLock);
3997
3998 // fixme nil cls?
3999 // fixme NULL sel?
4000
4001 assert(isRealized(cls));
4002
4003 while (cls && ((m = getMethodNoSuper_nolock(cls, sel))) == NULL) {
4004 cls = getSuperclass(cls);
4005 }
4006
4007 return m;
4008 }
4009
4010
4011 /***********************************************************************
4012 * _class_getMethod
4013 * fixme
4014 * Locking: read-locks runtimeLock
4015 **********************************************************************/
4016 __private_extern__ Method _class_getMethod(Class cls, SEL sel)
4017 {
4018 Method m;
4019 rwlock_read(&runtimeLock);
4020 m = (Method)getMethod_nolock(newcls(cls), sel);
4021 rwlock_unlock_read(&runtimeLock);
4022 return m;
4023 }
4024
4025
4026 /***********************************************************************
4027 * ABI-specific lookUpMethod helpers.
4028 * Locking: read- and write-locks runtimeLock.
4029 **********************************************************************/
4030 __private_extern__ void lockForMethodLookup(void)
4031 {
4032 rwlock_read(&runtimeLock);
4033 }
4034 __private_extern__ void unlockForMethodLookup(void)
4035 {
4036 rwlock_unlock_read(&runtimeLock);
4037 }
4038
4039 __private_extern__ IMP prepareForMethodLookup(Class cls, SEL sel, BOOL init)
4040 {
4041 rwlock_assert_unlocked(&runtimeLock);
4042
4043 if (!isRealized(newcls(cls))) {
4044 rwlock_write(&runtimeLock);
4045 realizeClass(newcls(cls));
4046 rwlock_unlock_write(&runtimeLock);
4047 }
4048
4049 if (init && !_class_isInitialized(cls)) {
4050 _class_initialize (cls);
4051 // If sel == initialize, _class_initialize will send +initialize and
4052 // then the messenger will send +initialize again after this
4053 // procedure finishes. Of course, if this is not being called
4054 // from the messenger then it won't happen. 2778172
4055 }
4056
4057 return NULL;
4058 }
4059
4060
4061 /***********************************************************************
4062 * class_getProperty
4063 * Returns the named property from cls or any of its superclasses, or NULL.
4064 * Locking: read-locks runtimeLock
4065 **********************************************************************/
4066 Property class_getProperty(Class cls_gen, const char *name)
4067 {
4068 Property result = NULL;
4069 chained_property_list *plist;
4070 struct class_t *cls = newcls(cls_gen);
4071
4072 if (!cls || !name) return NULL;
4073
4074 rwlock_read(&runtimeLock);
4075
4076 assert(isRealized(cls));
4077
4078 for ( ; cls; cls = getSuperclass(cls)) {
4079 for (plist = cls->data->properties; plist; plist = plist->next) {
4080 uint32_t i;
4081 for (i = 0; i < plist->count; i++) {
4082 if (0 == strcmp(name, plist->list[i].name)) {
4083 result = &plist->list[i];
4084 goto done;
4085 }
4086 }
4087 }
4088 }
4089
4090 done:
4091 rwlock_unlock_read(&runtimeLock);
4092
4093 return result;
4094 }
4095
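/***********************************************************************
* Usage sketch for class_getProperty(). Unlike class_copyPropertyList,
* this walks the superclass chain. The property name "delegate" is a
* hypothetical example.
*
*   objc_property_t prop = class_getProperty(cls, "delegate");
*   if (prop) {
*       printf("%s -> %s\n", property_getName(prop),
*              property_getAttributes(prop));
*   }
**********************************************************************/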
4096
4097 /***********************************************************************
4098 * Locking: fixme
4099 **********************************************************************/
4100 __private_extern__ BOOL _class_isMetaClass(Class cls)
4101 {
4102 if (!cls) return NO;
4103 return isMetaClass(newcls(cls));
4104 }
4105
4106 static BOOL
4107 isMetaClass(struct class_t *cls)
4108 {
4109 assert(cls);
4110 assert(isRealized(cls));
4111 return (cls->data->ro->flags & RO_META) ? YES : NO;
4112 }
4113
4114
4115 __private_extern__ Class _class_getMeta(Class cls)
4116 {
4117 assert(cls);
4118 if (isMetaClass(newcls(cls))) return cls;
4119 else return ((id)cls)->isa;
4120 }
4121
4122 Class gdb_class_getClass(Class cls)
4123 {
4124 const char *className = strdup(getName(newcls(cls)));
4125 if(!className) return Nil;
4126 Class rCls = look_up_class(className, NO, NO);
4127 free((char*)className);
4128 return rCls;
4129 }
4130
4131 BOOL gdb_objc_isRuntimeLocked()
4132 {
4133 if (rwlock_try_write(&runtimeLock)) {
4134 rwlock_unlock_write(&runtimeLock);
4135 } else
4136 return YES;
4137
4138 if (mutex_try_lock(&cacheUpdateLock)) {
4139 mutex_unlock(&cacheUpdateLock);
4140 } else
4141 return YES;
4142
4143 return NO;
4144 }
4145
4146 /***********************************************************************
4147 * Locking: fixme
4148 **********************************************************************/
4149 __private_extern__ BOOL
4150 _class_isInitializing(Class cls_gen)
4151 {
4152 struct class_t *cls = newcls(_class_getMeta(cls_gen));
4153 return (cls->data->flags & RW_INITIALIZING) ? YES : NO;
4154 }
4155
4156
4157 /***********************************************************************
4158 * Locking: fixme
4159 **********************************************************************/
4160 __private_extern__ BOOL
4161 _class_isInitialized(Class cls_gen)
4162 {
4163 struct class_t *cls = newcls(_class_getMeta(cls_gen));
4164 return (cls->data->flags & RW_INITIALIZED) ? YES : NO;
4165 }
4166
4167
4168 /***********************************************************************
4169 * Locking: fixme
4170 **********************************************************************/
4171 __private_extern__ void
4172 _class_setInitializing(Class cls_gen)
4173 {
4174 struct class_t *cls = newcls(_class_getMeta(cls_gen));
4175 changeInfo(cls, RW_INITIALIZING, 0);
4176 }
4177
4178
4179 /***********************************************************************
4180 * Locking: write-locks runtimeLock
4181 **********************************************************************/
4182 __private_extern__ void
4183 _class_setInitialized(Class cls_gen)
4184 {
4185
4186 struct class_t *metacls;
4187 struct class_t *cls;
4188
4189 rwlock_write(&runtimeLock);
4190 metacls = newcls(_class_getMeta(cls_gen));
4191 cls = getNonMetaClass(metacls);
4192
4193 // Update vtables (initially postponed pending +initialize completion)
4194 // Do cls first because root metacls is a subclass of root cls
4195 updateVtable(cls, YES);
4196 updateVtable(metacls, YES);
4197
4198 rwlock_unlock_write(&runtimeLock);
4199
4200 changeInfo(metacls, RW_INITIALIZED, RW_INITIALIZING);
4201 }
4202
4203
4204 /***********************************************************************
4205 * Locking: fixme
4206 **********************************************************************/
4207 __private_extern__ BOOL
4208 _class_shouldGrowCache(Class cls)
4209 {
4210 return YES; // fixme good or bad for memory use?
4211 }
4212
4213
4214 /***********************************************************************
4215 * Locking: fixme
4216 **********************************************************************/
4217 __private_extern__ void
4218 _class_setGrowCache(Class cls, BOOL grow)
4219 {
4220 // fixme good or bad for memory use?
4221 }
4222
4223
4224 /***********************************************************************
4225 * _class_isLoadable
4226 * fixme
4227 * Locking: none
4228 **********************************************************************/
4229 __private_extern__ BOOL
4230 _class_isLoadable(Class cls)
4231 {
4232 assert(isRealized(newcls(cls)));
4233 return YES; // any class registered for +load is definitely loadable
4234 }
4235
4236
4237 /***********************************************************************
4238 * Locking: fixme
4239 **********************************************************************/
4240 __private_extern__ BOOL
4241 _class_hasCxxStructorsNoSuper(Class cls)
4242 {
4243 assert(isRealized(newcls(cls)));
4244 return (newcls(cls)->data->ro->flags & RO_HAS_CXX_STRUCTORS) ? YES : NO;
4245 }
4246
4247
4248 /***********************************************************************
4249 * Locking: fixme
4250 **********************************************************************/
4251 __private_extern__ BOOL
4252 _class_shouldFinalizeOnMainThread(Class cls)
4253 {
4254 assert(isRealized(newcls(cls)));
4255 return (newcls(cls)->data->flags & RW_FINALIZE_ON_MAIN_THREAD) ? YES : NO;
4256 }
4257
4258
4259 /***********************************************************************
4260 * Locking: fixme
4261 **********************************************************************/
4262 __private_extern__ void
4263 _class_setFinalizeOnMainThread(Class cls)
4264 {
4265 assert(isRealized(newcls(cls)));
4266 changeInfo(newcls(cls), RW_FINALIZE_ON_MAIN_THREAD, 0);
4267 }
4268
4269
4270 /***********************************************************************
4271 * _class_instancesHaveAssociatedObjects
4272 * May manipulate unrealized future classes in the CF-bridged case.
4273 **********************************************************************/
4274 __private_extern__ BOOL
4275 _class_instancesHaveAssociatedObjects(Class cls_gen)
4276 {
4277 class_t *cls = newcls(cls_gen);
4278 assert(isFuture(cls) || isRealized(cls));
4279 return (cls->data->flags & RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS) ? YES : NO;
4280 }
4281
4282
4283 /***********************************************************************
4284 * _class_assertInstancesHaveAssociatedObjects
4285 * May manipulate unrealized future classes in the CF-bridged case.
4286 **********************************************************************/
4287 __private_extern__ void
4288 _class_assertInstancesHaveAssociatedObjects(Class cls_gen)
4289 {
4290 class_t *cls = newcls(cls_gen);
4291 assert(isFuture(cls) || isRealized(cls));
4292 changeInfo(cls, RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS, 0);
4293 }
4294
4295
4296 /***********************************************************************
4297 * Locking: none
4298 * fixme assert realized to get superclass remapping?
4299 **********************************************************************/
4300 __private_extern__ Class
4301 _class_getSuperclass(Class cls)
4302 {
4303 return (Class)getSuperclass(newcls(cls));
4304 }
4305
4306 static struct class_t *
4307 getSuperclass(struct class_t *cls)
4308 {
4309 if (!cls) return NULL;
4310 return cls->superclass;
4311 }
4312
4313
4314 /***********************************************************************
4315 * class_getIvarLayout
4316 * Called by the garbage collector.
4317 * The class must be NULL or already realized.
4318 * Locking: none
4319 **********************************************************************/
4320 const char *
4321 class_getIvarLayout(Class cls_gen)
4322 {
4323 class_t *cls = newcls(cls_gen);
4324 if (cls) return (const char *)cls->data->ro->ivarLayout;
4325 else return NULL;
4326 }
4327
4328
4329 /***********************************************************************
4330 * class_getWeakIvarLayout
4331 * Called by the garbage collector.
4332 * The class must be NULL or already realized.
4333 * Locking: none
4334 **********************************************************************/
4335 const char *
4336 class_getWeakIvarLayout(Class cls_gen)
4337 {
4338 class_t *cls = newcls(cls_gen);
4339 if (cls) return (const char *)cls->data->ro->weakIvarLayout;
4340 else return NULL;
4341 }
4342
4343
4344 /***********************************************************************
4345 * class_setIvarLayout
4346 * Changes the class's GC scan layout.
4347 * NULL layout means no unscanned ivars
4348 * The class must be under construction.
4349 * fixme: sanity-check layout vs instance size?
4350 * fixme: sanity-check layout vs superclass?
4351 * Locking: acquires runtimeLock
4352 **********************************************************************/
4353 void
4354 class_setIvarLayout(Class cls_gen, const char *layout)
4355 {
4356 class_t *cls = newcls(cls_gen);
4357 if (!cls) return;
4358
4359 rwlock_write(&runtimeLock);
4360
4361 // Can only change layout of in-construction classes.
4362 // note: if modifications to post-construction classes were
4363 // allowed, there would be a race below (us vs. concurrent GC scan)
4364 if (!(cls->data->flags & RW_CONSTRUCTING)) {
4365 _objc_inform("*** Can't set ivar layout for already-registered "
4366 "class '%s'", getName(cls));
4367 rwlock_unlock_write(&runtimeLock);
4368 return;
4369 }
4370
4371 class_ro_t *ro_w = make_ro_writeable(cls->data);
4372
4373 try_free(ro_w->ivarLayout);
4374 ro_w->ivarLayout = (unsigned char *)_strdup_internal(layout);
4375
4376 rwlock_unlock_write(&runtimeLock);
4377 }
4378
4379
4380 /***********************************************************************
4381 * class_setWeakIvarLayout
4382 * Changes the class's GC weak layout.
4383 * NULL layout means no weak ivars
4384 * The class must be under construction.
4385 * fixme: sanity-check layout vs instance size?
4386 * fixme: sanity-check layout vs superclass?
4387 * Locking: acquires runtimeLock
4388 **********************************************************************/
4389 void
4390 class_setWeakIvarLayout(Class cls_gen, const char *layout)
4391 {
4392 class_t *cls = newcls(cls_gen);
4393 if (!cls) return;
4394
4395 rwlock_write(&runtimeLock);
4396
4397 // Can only change layout of in-construction classes.
4398 // note: if modifications to post-construction classes were
4399 // allowed, there would be a race below (us vs. concurrent GC scan)
4400 if (!(cls->data->flags & RW_CONSTRUCTING)) {
4401 _objc_inform("*** Can't set weak ivar layout for already-registered "
4402 "class '%s'", getName(cls));
4403 rwlock_unlock_write(&runtimeLock);
4404 return;
4405 }
4406
4407 class_ro_t *ro_w = make_ro_writeable(cls->data);
4408
4409 try_free(ro_w->weakIvarLayout);
4410 ro_w->weakIvarLayout = (unsigned char *)_strdup_internal(layout);
4411
4412 rwlock_unlock_write(&runtimeLock);
4413 }
4414
4415
4416 /***********************************************************************
4417 * _class_getVariable
4418 * fixme
4419 * Locking: read-locks runtimeLock
4420 **********************************************************************/
4421 __private_extern__ Ivar
4422 _class_getVariable(Class cls, const char *name)
4423 {
4424 rwlock_read(&runtimeLock);
4425
4426 for ( ; cls != Nil; cls = class_getSuperclass(cls)) {
4427 struct ivar_t *ivar = getIvar(newcls(cls), name);
4428 if (ivar) {
4429 rwlock_unlock_read(&runtimeLock);
4430 return (Ivar)ivar;
4431 }
4432 }
4433
4434 rwlock_unlock_read(&runtimeLock);
4435
4436 return NULL;
4437 }
4438
4439
4440 /***********************************************************************
4441 * class_conformsToProtocol
4442 * fixme
4443 * Locking: read-locks runtimeLock
4444 **********************************************************************/
4445 BOOL class_conformsToProtocol(Class cls_gen, Protocol *proto)
4446 {
4447 Protocol **protocols;
4448 unsigned int count, i;
4449 BOOL result = NO;
4450
4451 if (!cls_gen) return NO;
4452 if (!proto) return NO;
4453
4454 // fixme null cls?
4455
4456 protocols = class_copyProtocolList(cls_gen, &count);
4457
4458 for (i = 0; i < count; i++) {
4459 if (protocols[i] == proto ||
4460 protocol_conformsToProtocol(protocols[i], proto))
4461 {
4462 result = YES;
4463 break;
4464 }
4465 }
4466
4467 if (protocols) free(protocols);
4468
4469 return result;
4470 }
4471
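/***********************************************************************
* Usage sketch for class_conformsToProtocol(). Note that, as written
* above, only the class's own protocol list is consulted; callers that
* care about inherited conformance typically walk the superclass chain
* themselves.
*
*   if (class_conformsToProtocol(objc_getClass("NSString"),
*                                objc_getProtocol("NSCopying"))) {
*       // NSString itself adopts NSCopying
*   }
**********************************************************************/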
4472
4473 /***********************************************************************
4474 * class_addMethod
4475 * Shared implementation of class_addMethod and class_replaceMethod.
4476 * Locking: write-locks runtimeLock
4477 **********************************************************************/
4478 static IMP
4479 _class_addMethod(Class cls_gen, SEL name, IMP imp,
4480 const char *types, BOOL replace)
4481 {
4482 struct class_t *cls = newcls(cls_gen);
4483 IMP result = NULL;
4484
4485 if (!types) types = "";
4486
4487 rwlock_write(&runtimeLock);
4488
4489 assert(isRealized(cls));
4490
4491 method_t *m;
4492 if ((m = getMethodNoSuper_nolock(cls, name))) {
4493 // already exists
4494 if (!replace) {
4495 result = _method_getImplementation(m);
4496 } else {
4497 result = _method_setImplementation(cls, m, imp);
4498 }
4499 } else {
4500 // fixme optimize
4501 method_list_t *newlist;
4502 newlist = _calloc_internal(sizeof(*newlist), 1);
4503 newlist->entsize_NEVER_USE = (uint32_t)sizeof(method_t) | fixed_up_method_list;
4504 newlist->count = 1;
4505 newlist->first.name = name;
4506 newlist->first.types = strdup(types);
4507 if (name != (SEL)kIgnore) {
4508 newlist->first.imp = imp;
4509 } else {
4510 newlist->first.imp = (IMP)&_objc_ignored_method;
4511 }
4512
4513 BOOL vtablesAffected;
4514 attachMethodLists(cls, &newlist, 1, NO, &vtablesAffected);
4515 flushCaches(cls);
4516 if (vtablesAffected) flushVtables(cls);
4517
4518 result = NULL;
4519 }
4520
4521 rwlock_unlock_write(&runtimeLock);
4522
4523 return result;
4524 }
4525
4526
4527 BOOL
4528 class_addMethod(Class cls, SEL name, IMP imp, const char *types)
4529 {
4530 if (!cls) return NO;
4531
4532 IMP old = _class_addMethod(cls, name, imp, types, NO);
4533 return old ? NO : YES;
4534 }
4535
4536
4537 IMP
4538 class_replaceMethod(Class cls, SEL name, IMP imp, const char *types)
4539 {
4540 if (!cls) return NULL;
4541
4542 return _class_addMethod(cls, name, imp, types, YES);
4543 }
4544
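/***********************************************************************
* Usage sketch for class_addMethod()/class_replaceMethod(). The
* selector name "greeting" and the C function below are hypothetical;
* "v@:" is the type encoding for a method returning void and taking
* only the implicit self and _cmd arguments.
*
*   static void greeting(id self, SEL _cmd) {
*       printf("hello from %s\n", class_getName(object_getClass(self)));
*   }
*
*   SEL sel = sel_registerName("greeting");
*   if (!class_addMethod(cls, sel, (IMP)greeting, "v@:")) {
*       // the method already existed; replace its IMP instead
*       class_replaceMethod(cls, sel, (IMP)greeting, "v@:");
*   }
**********************************************************************/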
4545
4546 /***********************************************************************
4547 * class_addIvar
4548 * Adds an ivar to a class.
4549 * Locking: acquires runtimeLock
4550 **********************************************************************/
4551 BOOL
4552 class_addIvar(Class cls_gen, const char *name, size_t size,
4553 uint8_t alignment, const char *type)
4554 {
4555 struct class_t *cls = newcls(cls_gen);
4556
4557 if (!cls) return NO;
4558
4559 if (!type) type = "";
4560 if (name && 0 == strcmp(name, "")) name = NULL;
4561
4562 rwlock_write(&runtimeLock);
4563
4564 assert(isRealized(cls));
4565
4566 // No class variables
4567 if (isMetaClass(cls)) {
4568 rwlock_unlock_write(&runtimeLock);
4569 return NO;
4570 }
4571
4572 // Can only add ivars to in-construction classes.
4573 if (!(cls->data->flags & RW_CONSTRUCTING)) {
4574 rwlock_unlock_write(&runtimeLock);
4575 return NO;
4576 }
4577
4578 // Check for existing ivar with this name, unless it's anonymous.
4579 // Check for too-big ivar.
4580 // fixme check for superclass ivar too?
4581 if ((name && getIvar(cls, name)) || size > UINT32_MAX) {
4582 rwlock_unlock_write(&runtimeLock);
4583 return NO;
4584 }
4585
4586 class_ro_t *ro_w = make_ro_writeable(cls->data);
4587
4588 // fixme allocate less memory here
4589
4590 ivar_list_t *oldlist, *newlist;
4591 if ((oldlist = (ivar_list_t *)cls->data->ro->ivars)) {
4592 size_t oldsize = ivar_list_size(oldlist);
4593 newlist = _calloc_internal(oldsize + oldlist->entsize, 1);
4594 memcpy(newlist, oldlist, oldsize);
4595 _free_internal(oldlist);
4596 } else {
4597 newlist = _calloc_internal(sizeof(ivar_list_t), 1);
4598 newlist->entsize = (uint32_t)sizeof(ivar_t);
4599 }
4600
4601 uint32_t offset = instanceSize(cls);
4602 uint32_t alignMask = (1<<alignment)-1;
4603 offset = (offset + alignMask) & ~alignMask;
4604
4605 ivar_t *ivar = ivar_list_nth(newlist, newlist->count++);
4606 ivar->offset = _malloc_internal(sizeof(*ivar->offset));
4607 *ivar->offset = offset;
4608 ivar->name = name ? _strdup_internal(name) : NULL;
4609 ivar->type = _strdup_internal(type);
4610 ivar->alignment = alignment;
4611 ivar->size = (uint32_t)size;
4612
4613 ro_w->ivars = newlist;
4614 ro_w->instanceSize = (uint32_t)(offset + size);
4615
4616 // Ivar layout updated in registerClass.
4617
4618 rwlock_unlock_write(&runtimeLock);
4619
4620 return YES;
4621 }
4622
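/***********************************************************************
* Usage sketch for class_addIvar(). Per the checks above, it only works
* on a class that is still under construction, i.e. between
* objc_allocateClassPair() and objc_registerClassPair(), and never on a
* metaclass. The alignment argument is log2 of the byte alignment
* (3 for an 8-byte id on LP64). The ivar name "_value" is hypothetical.
*
*   #include <math.h>                       // for log2()
*   class_addIvar(cls, "_value", sizeof(id),
*                 (uint8_t)log2(sizeof(id)), @encode(id));
**********************************************************************/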
4623
4624 /***********************************************************************
4625 * class_addProtocol
4626 * Adds a protocol to a class.
4627 * Locking: acquires runtimeLock
4628 **********************************************************************/
4629 BOOL class_addProtocol(Class cls_gen, Protocol *protocol_gen)
4630 {
4631 class_t *cls = newcls(cls_gen);
4632 protocol_t *protocol = newprotocol(protocol_gen);
4633 protocol_list_t *plist;
4634 protocol_list_t **plistp;
4635
4636 if (!cls) return NO;
4637 if (class_conformsToProtocol(cls_gen, protocol_gen)) return NO;
4638
4639 rwlock_write(&runtimeLock);
4640
4641 assert(isRealized(cls));
4642
4643 // fixme optimize
4644 plist = _malloc_internal(sizeof(protocol_list_t) + sizeof(protocol_t *));
4645 plist->count = 1;
4646 plist->list[0] = (protocol_ref_t)protocol;
4647
4648 unsigned int count = 0;
4649 for (plistp = cls->data->protocols; plistp && *plistp; plistp++) {
4650 count++;
4651 }
4652
4653 cls->data->protocols =
4654 _realloc_internal(cls->data->protocols,
4655 (count+2) * sizeof(protocol_list_t *));
4656 cls->data->protocols[count] = plist;
4657 cls->data->protocols[count+1] = NULL;
4658
4659 // fixme metaclass?
4660
4661 rwlock_unlock_write(&runtimeLock);
4662
4663 return YES;
4664 }
4665
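/***********************************************************************
* Usage sketch for class_addProtocol(). It returns NO if the class
* already conforms to the protocol, per the class_conformsToProtocol
* check above.
*
*   class_addProtocol(cls, objc_getProtocol("NSCoding"));
**********************************************************************/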
4666
4667 /***********************************************************************
4668 * look_up_class
4669 * Look up a class by name, and realize it.
4670 * Locking: acquires runtimeLock
4671 **********************************************************************/
4672 __private_extern__ id
4673 look_up_class(const char *name,
4674 BOOL includeUnconnected __attribute__((unused)),
4675 BOOL includeClassHandler __attribute__((unused)))
4676 {
4677 if (!name) return nil;
4678
4679 rwlock_read(&runtimeLock);
4680 class_t *result = getClass(name);
4681 BOOL unrealized = result && !isRealized(result);
4682 rwlock_unlock_read(&runtimeLock);
4683 if (unrealized) {
4684 rwlock_write(&runtimeLock);
4685 realizeClass(result);
4686 rwlock_unlock_write(&runtimeLock);
4687 }
4688 return (id)result;
4689 }
4690
4691
4692 /***********************************************************************
4693 * objc_duplicateClass
4694 * fixme
4695 * Locking: acquires runtimeLock
4696 **********************************************************************/
4697 Class
4698 objc_duplicateClass(Class original_gen, const char *name,
4699 size_t extraBytes)
4700 {
4701 struct class_t *original = newcls(original_gen);
4702 struct class_t *duplicate;
4703
4704 rwlock_write(&runtimeLock);
4705
4706 assert(isRealized(original));
4707 assert(!isMetaClass(original));
4708
4709 duplicate = (struct class_t *)
4710 _calloc_class(instanceSize(original->isa) + extraBytes);
4711 if (instanceSize(original->isa) < sizeof(class_t)) {
4712 _objc_inform("busted! %s\n", original->data->ro->name);
4713 }
4714
4715
4716 duplicate->isa = original->isa;
4717 duplicate->superclass = original->superclass;
4718 duplicate->cache = (Cache)&_objc_empty_cache;
4719 duplicate->vtable = _objc_empty_vtable;
4720
4721 duplicate->data = _calloc_internal(sizeof(*original->data), 1);
4722 duplicate->data->flags = (original->data->flags | RW_COPIED_RO) & ~RW_SPECIALIZED_VTABLE;
4723 duplicate->data->version = original->data->version;
4724 duplicate->data->firstSubclass = NULL;
4725 duplicate->data->nextSiblingClass = NULL;
4726
4727 duplicate->data->ro =
4728 _memdup_internal(original->data->ro, sizeof(*original->data->ro));
4729 *(char **)&duplicate->data->ro->name = _strdup_internal(name);
4730
4731 if (original->data->methods) {
4732 duplicate->data->methods =
4733 _memdup_internal(original->data->methods,
4734 malloc_size(original->data->methods));
4735 method_list_t **mlistp = duplicate->data->methods;
4736 for (mlistp = duplicate->data->methods; *mlistp; mlistp++) {
4737 *mlistp = _memdup_internal(*mlistp, method_list_size(*mlistp));
4738 }
4739 }
4740
4741 // fixme dies when categories are added to the base
4742 duplicate->data->properties = original->data->properties;
4743 duplicate->data->protocols = original->data->protocols;
4744
4745 if (duplicate->superclass) {
4746 addSubclass(duplicate->superclass, duplicate);
4747 }
4748
4749 // Don't methodize class - construction above is correct
4750
4751 addNamedClass(duplicate, duplicate->data->ro->name);
4752 addRealizedClass(duplicate);
4753 // no: duplicate->isa == original->isa
4754 // addRealizedMetaclass(duplicate->isa);
4755
4756 if (PrintConnecting) {
4757 _objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p",
4758 name, original->data->ro->name,
4759 duplicate, duplicate->data->ro);
4760 }
4761
4762 rwlock_unlock_write(&runtimeLock);
4763
4764 return (Class)duplicate;
4765 }
4766
4767 /***********************************************************************
4768 * objc_initializeClassPair
4769 * Locking: runtimeLock must be write-locked by the caller
4770 **********************************************************************/
4771 static void objc_initializeClassPair_internal(Class superclass_gen, const char *name, Class cls_gen, Class meta_gen)
4772 {
4773 rwlock_assert_writing(&runtimeLock);
4774
4775 class_t *superclass = newcls(superclass_gen);
4776 class_t *cls = newcls(cls_gen);
4777 class_t *meta = newcls(meta_gen);
4778 class_ro_t *cls_ro_w, *meta_ro_w;
4779
4780 cls->data = _calloc_internal(sizeof(class_rw_t), 1);
4781 meta->data = _calloc_internal(sizeof(class_rw_t), 1);
4782 cls_ro_w = _calloc_internal(sizeof(class_ro_t), 1);
4783 meta_ro_w = _calloc_internal(sizeof(class_ro_t), 1);
4784 cls->data->ro = cls_ro_w;
4785 meta->data->ro = meta_ro_w;
4786
4787 // Set basic info
4788 cls->cache = (Cache)&_objc_empty_cache;
4789 meta->cache = (Cache)&_objc_empty_cache;
4790 cls->vtable = _objc_empty_vtable;
4791 meta->vtable = _objc_empty_vtable;
4792
4793 cls->data->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED;
4794 meta->data->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED;
4795 cls->data->version = 0;
4796 meta->data->version = 7;
4797
4798 cls_ro_w->flags = 0;
4799 meta_ro_w->flags = RO_META;
4800 if (!superclass) {
4801 cls_ro_w->flags |= RO_ROOT;
4802 meta_ro_w->flags |= RO_ROOT;
4803 }
4804 if (superclass) {
4805 cls_ro_w->instanceStart = instanceSize(superclass);
4806 meta_ro_w->instanceStart = instanceSize(superclass->isa);
4807 cls_ro_w->instanceSize = cls_ro_w->instanceStart;
4808 meta_ro_w->instanceSize = meta_ro_w->instanceStart;
4809 } else {
4810 cls_ro_w->instanceStart = 0;
4811 meta_ro_w->instanceStart = (uint32_t)sizeof(class_t);
4812 cls_ro_w->instanceSize = (uint32_t)sizeof(id); // just an isa
4813 meta_ro_w->instanceSize = meta_ro_w->instanceStart;
4814 }
4815
4816 cls_ro_w->name = _strdup_internal(name);
4817 meta_ro_w->name = _strdup_internal(name);
4818
4819 // Connect to superclasses and metaclasses
4820 cls->isa = meta;
4821 if (superclass) {
4822 meta->isa = superclass->isa->isa;
4823 cls->superclass = superclass;
4824 meta->superclass = superclass->isa;
4825 addSubclass(superclass, cls);
4826 addSubclass(superclass->isa, meta);
4827 } else {
4828 meta->isa = meta;
4829 cls->superclass = Nil;
4830 meta->superclass = cls;
4831 addSubclass(cls, meta);
4832 }
4833 }
4834
4835 /***********************************************************************
4836 * objc_initializeClassPair
4837 **********************************************************************/
4838 Class objc_initializeClassPair(Class superclass_gen, const char *name, Class cls_gen, Class meta_gen)
4839 {
4840 class_t *superclass = newcls(superclass_gen);
4841
4842 rwlock_write(&runtimeLock);
4843
4844 //
4845 // Common superclass integrity checks with objc_allocateClassPair
4846 //
4847 if (getClass(name)) {
4848 rwlock_unlock_write(&runtimeLock);
4849 return Nil;
4850 }
4851 // fixme reserve class against simultaneous allocation
4852
4853 if (superclass) assert(isRealized(superclass));
4854
4855 if (superclass && superclass->data->flags & RW_CONSTRUCTING) {
4856 // Can't make subclass of an in-construction class
4857 rwlock_unlock_write(&runtimeLock);
4858 return Nil;
4859 }
4860
4861
4862 // just initialize what was supplied
4863 objc_initializeClassPair_internal(superclass_gen, name, cls_gen, meta_gen);
4864
4865 rwlock_unlock_write(&runtimeLock);
4866 return cls_gen;
4867 }
4868
4869 /***********************************************************************
4870 * objc_allocateClassPair
4871 * Allocates a new class and metaclass pair for later registration via objc_registerClassPair.
4872 * Locking: acquires runtimeLock
4873 **********************************************************************/
4874 Class objc_allocateClassPair(Class superclass_gen, const char *name,
4875 size_t extraBytes)
4876 {
4877 class_t *superclass = newcls(superclass_gen);
4878 Class cls, meta;
4879
4880 rwlock_write(&runtimeLock);
4881
4882 //
4883 // Common superclass integrity checks with objc_initializeClassPair
4884 //
4885 if (getClass(name)) {
4886 rwlock_unlock_write(&runtimeLock);
4887 return Nil;
4888 }
4889 // fixme reserve class against simultaneous allocation
4890
4891 if (superclass) assert(isRealized(superclass));
4892
4893 if (superclass && superclass->data->flags & RW_CONSTRUCTING) {
4894 // Can't make subclass of an in-construction class
4895 rwlock_unlock_write(&runtimeLock);
4896 return Nil;
4897 }
4898
4899
4900
4901 // Allocate new classes.
4902 if (superclass) {
4903 cls = _calloc_class(instanceSize(superclass->isa) + extraBytes);
4904 meta = _calloc_class(instanceSize(superclass->isa->isa) + extraBytes);
4905 } else {
4906 cls = _calloc_class(sizeof(class_t) + extraBytes);
4907 meta = _calloc_class(sizeof(class_t) + extraBytes);
4908 }
4909
4910
4911 objc_initializeClassPair_internal(superclass_gen, name, cls, meta);
4912
4913 rwlock_unlock_write(&runtimeLock);
4914
4915 return (Class)cls;
4916 }
4917
4918
4919 /***********************************************************************
4920 * objc_registerClassPair
4921 * Finishes construction of a class pair allocated with objc_allocateClassPair and adds it to the class tables.
4922 * Locking: acquires runtimeLock
4923 **********************************************************************/
4924 void objc_registerClassPair(Class cls_gen)
4925 {
4926 class_t *cls = newcls(cls_gen);
4927
4928 rwlock_write(&runtimeLock);
4929
4930 if ((cls->data->flags & RW_CONSTRUCTED) ||
4931 (cls->isa->data->flags & RW_CONSTRUCTED))
4932 {
4933 _objc_inform("objc_registerClassPair: class '%s' was already "
4934 "registered!", cls->data->ro->name);
4935 rwlock_unlock_write(&runtimeLock);
4936 return;
4937 }
4938
4939 if (!(cls->data->flags & RW_CONSTRUCTING) ||
4940 !(cls->isa->data->flags & RW_CONSTRUCTING))
4941 {
4942 _objc_inform("objc_registerClassPair: class '%s' was not "
4943 "allocated with objc_allocateClassPair!",
4944 cls->data->ro->name);
4945 rwlock_unlock_write(&runtimeLock);
4946 return;
4947 }
4948
4949 // Build ivar layouts
4950 if (UseGC) {
4951 struct class_t *supercls = getSuperclass(cls);
4952 class_ro_t *ro_w = (class_ro_t *)cls->data->ro;
4953
4954 if (ro_w->ivarLayout) {
4955 // Class builder already called class_setIvarLayout.
4956 }
4957 else if (!supercls) {
4958 // Root class. Scan conservatively (should be isa ivar only).
4959 // ivar_layout is already NULL.
4960 }
4961 else if (ro_w->ivars == NULL) {
4962 // No local ivars. Use superclass's layouts.
4963 ro_w->ivarLayout = (unsigned char *)
4964 _strdup_internal((char *)supercls->data->ro->ivarLayout);
4965 }
4966 else {
4967 // Has local ivars. Build layouts based on superclass.
4968 layout_bitmap bitmap =
4969 layout_bitmap_create(supercls->data->ro->ivarLayout,
4970 instanceSize(supercls),
4971 instanceSize(cls), NO);
4972 uint32_t i;
4973 for (i = 0; i < ro_w->ivars->count; i++) {
4974 ivar_t *ivar = ivar_list_nth(ro_w->ivars, i);
4975 if (!ivar->offset) continue; // anonymous bitfield
4976
4977 layout_bitmap_set_ivar(bitmap, ivar->type, *ivar->offset);
4978 }
4979 ro_w->ivarLayout = layout_string_create(bitmap);
4980 layout_bitmap_free(bitmap);
4981 }
4982
4983 if (ro_w->weakIvarLayout) {
4984 // Class builder already called class_setWeakIvarLayout.
4985 }
4986 else if (!supercls) {
4987 // Root class. No weak ivars (should be isa ivar only).
4988 // weak_ivar_layout is already NULL.
4989 }
4990 else if (ro_w->ivars == NULL) {
4991 // No local ivars. Use superclass's layout.
4992 ro_w->weakIvarLayout = (unsigned char *)
4993 _strdup_internal((char *)supercls->data->ro->weakIvarLayout);
4994 }
4995 else {
4996 // Has local ivars. Build layout based on superclass.
4997 // No way to add weak ivars yet.
4998 ro_w->weakIvarLayout = (unsigned char *)
4999 _strdup_internal((char *)supercls->data->ro->weakIvarLayout);
5000 }
5001 }
5002
5003 // Clear "under construction" bit, set "done constructing" bit
5004 cls->data->flags &= ~RW_CONSTRUCTING;
5005 cls->isa->data->flags &= ~RW_CONSTRUCTING;
5006 cls->data->flags |= RW_CONSTRUCTED;
5007 cls->isa->data->flags |= RW_CONSTRUCTED;
5008
5009 // Add to realized and uninitialized classes
5010 addNamedClass(cls, cls->data->ro->name);
5011 addRealizedClass(cls);
5012 addRealizedMetaclass(cls->isa);
5013 addUninitializedClass(cls, cls->isa);
5014
5015 rwlock_unlock_write(&runtimeLock);
5016 }
5017
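/***********************************************************************
* End-to-end sketch of building a class at runtime with the calls
* above. The class name "MyDynamicClass" and the greeting() function
* (see the class_addMethod sketch earlier) are hypothetical;
* <objc/message.h> is assumed for objc_msgSend.
*
*   Class cls = objc_allocateClassPair(objc_getClass("NSObject"),
*                                      "MyDynamicClass", 0);
*   if (cls) {
*       class_addMethod(cls, sel_registerName("greeting"),
*                       (IMP)greeting, "v@:");
*       objc_registerClassPair(cls);           // class is now usable
*       id obj = class_createInstance(cls, 0);
*       ((void (*)(id, SEL))objc_msgSend)(obj, sel_registerName("greeting"));
*       object_dispose(obj);
*       objc_disposeClassPair(cls);  // only once no instances remain
*   }
**********************************************************************/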
5018
5019 static void unload_class(class_t *cls, BOOL isMeta)
5020 {
5021 // Detach class from various lists
5022
5023 // categories not yet attached to this class
5024 category_list *cats;
5025 cats = unattachedCategoriesForClass(cls);
5026 if (cats) free(cats);
5027
5028 // class tables and +load queue
5029 if (!isMeta) {
5030 removeNamedClass(cls, getName(cls));
5031 removeRealizedClass(cls);
5032 removeUninitializedClass(cls);
5033 } else {
5034 removeRealizedMetaclass(cls);
5035 }
5036
5037 // superclass's subclass list
5038 if (isRealized(cls)) {
5039 class_t *supercls = getSuperclass(cls);
5040 if (supercls) removeSubclass(supercls, cls);
5041 }
5042
5043
5044 // Dispose the class's own data structures
5045
5046 if (isRealized(cls)) {
5047 uint32_t i;
5048
5049 // Dereferences the cache contents; do this before freeing methods
5050 if (cls->cache != (Cache)&_objc_empty_cache) _cache_free(cls->cache);
5051
5052 if (cls->data->methods) {
5053 method_list_t **mlistp;
5054 for (mlistp = cls->data->methods; *mlistp; mlistp++) {
5055 for (i = 0; i < (**mlistp).count; i++) {
5056 method_t *m = method_list_nth(*mlistp, i);
5057 try_free(m->types);
5058 }
5059 try_free(*mlistp);
5060 }
5061 try_free(cls->data->methods);
5062 }
5063
5064 const ivar_list_t *ilist = cls->data->ro->ivars;
5065 if (ilist) {
5066 for (i = 0; i < ilist->count; i++) {
5067 const ivar_t *ivar = ivar_list_nth(ilist, i);
5068 try_free(ivar->offset);
5069 try_free(ivar->name);
5070 try_free(ivar->type);
5071 }
5072 try_free(ilist);
5073 }
5074
5075 protocol_list_t **plistp = cls->data->protocols;
5076 for (plistp = cls->data->protocols; plistp && *plistp; plistp++) {
5077 try_free(*plistp);
5078 }
5079 try_free(cls->data->protocols);
5080
5081 // fixme:
5082 // properties
5083
5084 if (cls->vtable != _objc_empty_vtable &&
5085 cls->data->flags & RW_SPECIALIZED_VTABLE) try_free(cls->vtable);
5086 try_free(cls->data->ro->ivarLayout);
5087 try_free(cls->data->ro->weakIvarLayout);
5088 try_free(cls->data->ro->name);
5089 try_free(cls->data->ro);
5090 try_free(cls->data);
5091 try_free(cls);
5092 }
5093 }
5094
5095 void objc_disposeClassPair(Class cls_gen)
5096 {
5097 class_t *cls = newcls(cls_gen);
5098
5099 rwlock_write(&runtimeLock);
5100
5101 if (!(cls->data->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)) ||
5102 !(cls->isa->data->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)))
5103 {
5104 // class not allocated with objc_allocateClassPair
5105 // disposing still-unregistered class is OK!
5106 _objc_inform("objc_disposeClassPair: class '%s' was not "
5107 "allocated with objc_allocateClassPair!",
5108 cls->data->ro->name);
5109 rwlock_unlock_write(&runtimeLock);
5110 return;
5111 }
5112
5113 if (isMetaClass(cls)) {
5114 _objc_inform("objc_disposeClassPair: class '%s' is a metaclass, "
5115 "not a class!", cls->data->ro->name);
5116 rwlock_unlock_write(&runtimeLock);
5117 return;
5118 }
5119
5120 // Shouldn't have any live subclasses.
5121 if (cls->data->firstSubclass) {
5122 _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
5123 "including '%s'!", cls->data->ro->name,
5124 getName(cls->data->firstSubclass));
5125 }
5126 if (cls->isa->data->firstSubclass) {
5127 _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
5128 "including '%s'!", cls->data->ro->name,
5129 getName(cls->isa->data->firstSubclass));
5130 }
5131
5132 // don't remove_class_from_loadable_list()
5133 // - it's not there and we don't have the lock
5134 unload_class(cls->isa, YES);
5135 unload_class(cls, NO);
5136
5137 rwlock_unlock_write(&runtimeLock);
5138 }
5139
5140
5141
5142 /***********************************************************************
5143 * class_createInstanceFromZone
5144 * fixme
5145 * Locking: none
5146 **********************************************************************/
5147 id
5148 class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
5149 {
5150 if (cls) assert(isRealized(newcls(cls)));
5151 return _internal_class_createInstanceFromZone(cls, extraBytes, zone);
5152 }
5153
5154
5155 /***********************************************************************
5156 * class_createInstance
5157 * fixme
5158 * Locking: none
5159 **********************************************************************/
5160 id
5161 class_createInstance(Class cls, size_t extraBytes)
5162 {
5163 return class_createInstanceFromZone(cls, extraBytes, NULL);
5164 }
5165
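/***********************************************************************
* Usage sketch for the extraBytes parameter: the extra space is
* allocated past the declared ivars and can be located with
* object_getIndexedIvars(). The 16-byte figure is arbitrary.
*
*   id obj = class_createInstance(cls, 16);
*   void *extra = object_getIndexedIvars(obj);  // 16 spare bytes
*   object_dispose(obj);
**********************************************************************/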
5166
5167 /***********************************************************************
5168 * object_copyFromZone
5169 * fixme
5170 * Locking: none
5171 **********************************************************************/
5172 id
5173 object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
5174 {
5175 id obj;
5176 size_t size;
5177
5178 if (!oldObj) return nil;
5179
5180 size = _class_getInstanceSize(oldObj->isa) + extraBytes;
5181 #if !defined(NO_GC)
5182 if (UseGC) {
5183 obj = (id) auto_zone_allocate_object(gc_zone, size,
5184 AUTO_OBJECT_SCANNED, 0, 1);
5185 } else
5186 #endif
5187 if (zone) {
5188 obj = malloc_zone_calloc(zone, size, 1);
5189 } else {
5190 obj = (id) calloc(1, size);
5191 }
5192 if (!obj) return nil;
5193
5194 // fixme this doesn't handle C++ ivars correctly (#4619414)
5195 objc_memmove_collectable(obj, oldObj, size);
5196
5197 #if !defined(NO_GC)
5198 if (UseGC) gc_fixup_weakreferences(obj, oldObj);
5199 #endif
5200
5201 return obj;
5202 }
5203
5204
5205 /***********************************************************************
5206 * object_copy
5207 * fixme
5208 * Locking: none
5209 **********************************************************************/
5210 id
5211 object_copy(id oldObj, size_t extraBytes)
5212 {
5213 return object_copyFromZone(oldObj, extraBytes, malloc_default_zone());
5214 }
5215
5216
5217 /***********************************************************************
5218 * object_dispose
5219 * fixme
5220 * Locking: none
5221 **********************************************************************/
5222 id
5223 object_dispose(id obj)
5224 {
5225 return _internal_object_dispose(obj);
5226 }
5227
5228
5229 /***********************************************************************
5230 * _objc_getFreedObjectClass
5231 * fixme
5232 * Locking: none
5233 **********************************************************************/
5234 Class _objc_getFreedObjectClass (void)
5235 {
5236 return nil;
5237 }
5238
5239 #ifndef NO_FIXUP
5240
5241 extern id objc_msgSend_fixup(id, SEL, ...);
5242 extern id objc_msgSend_fixedup(id, SEL, ...);
5243 extern id objc_msgSendSuper2_fixup(id, SEL, ...);
5244 extern id objc_msgSendSuper2_fixedup(id, SEL, ...);
5245 extern id objc_msgSend_stret_fixup(id, SEL, ...);
5246 extern id objc_msgSend_stret_fixedup(id, SEL, ...);
5247 extern id objc_msgSendSuper2_stret_fixup(id, SEL, ...);
5248 extern id objc_msgSendSuper2_stret_fixedup(id, SEL, ...);
5249 #if defined(__i386__) || defined(__x86_64__)
5250 extern id objc_msgSend_fpret_fixup(id, SEL, ...);
5251 extern id objc_msgSend_fpret_fixedup(id, SEL, ...);
5252 #endif
5253 #if defined(__x86_64__)
5254 extern id objc_msgSend_fp2ret_fixup(id, SEL, ...);
5255 extern id objc_msgSend_fp2ret_fixedup(id, SEL, ...);
5256 #endif
5257
5258 /***********************************************************************
5259 * _objc_fixupMessageRef
5260 * Fixes up message ref *msg.
5261 * obj is the receiver. supr is NULL for non-super messages
5262 * Locking: acquires runtimeLock
5263 **********************************************************************/
5264 __private_extern__ IMP
5265 _objc_fixupMessageRef(id obj, struct objc_super2 *supr, message_ref *msg)
5266 {
5267 IMP imp;
5268 class_t *isa;
5269
5270 rwlock_assert_unlocked(&runtimeLock);
5271
5272 if (!supr) {
5273 // normal message - search obj->isa for the method implementation
5274 isa = (class_t *)obj->isa;
5275
5276 if (!isRealized(isa)) {
5277 // obj is a class object, isa is its metaclass
5278 class_t *cls;
5279 rwlock_write(&runtimeLock);
5280 cls = realizeClass((class_t *)obj);
5281 rwlock_unlock_write(&runtimeLock);
5282
5283 // shouldn't have instances of unrealized classes!
5284 assert(isMetaClass(isa));
5285 // shouldn't be relocating classes here!
5286 assert(cls == (class_t *)obj);
5287 }
5288 }
5289 else {
5290 // this is objc_msgSend_super, and supr->current_class->superclass
5291 // is the class to search for the method implementation
5292 assert(isRealized((class_t *)supr->current_class));
5293 isa = getSuperclass((class_t *)supr->current_class);
5294 }
5295
5296 msg->sel = sel_registerName((const char *)msg->sel);
5297
5298 #ifndef NO_VTABLE
5299 int vtableIndex;
5300 if (msg->imp == (IMP)&objc_msgSend_fixup &&
5301 (vtableIndex = vtable_getIndex(msg->sel)) >= 0)
5302 {
5303 // vtable dispatch
5304 msg->imp = vtableTrampolines[vtableIndex];
5305 imp = isa->vtable[vtableIndex];
5306 }
5307 else
5308 #endif
5309 {
5310 // ordinary dispatch
5311 imp = lookUpMethod((Class)isa, msg->sel, YES/*initialize*/, YES/*cache*/);
5312
5313 if (msg->imp == (IMP)&objc_msgSend_fixup) {
5314 msg->imp = (IMP)&objc_msgSend_fixedup;
5315 }
5316 else if (msg->imp == (IMP)&objc_msgSendSuper2_fixup) {
5317 msg->imp = (IMP)&objc_msgSendSuper2_fixedup;
5318 }
5319 else if (msg->imp == (IMP)&objc_msgSend_stret_fixup) {
5320 msg->imp = (IMP)&objc_msgSend_stret_fixedup;
5321 }
5322 else if (msg->imp == (IMP)&objc_msgSendSuper2_stret_fixup) {
5323 msg->imp = (IMP)&objc_msgSendSuper2_stret_fixedup;
5324 }
5325 #if defined(__i386__) || defined(__x86_64__)
5326 else if (msg->imp == (IMP)&objc_msgSend_fpret_fixup) {
5327 msg->imp = (IMP)&objc_msgSend_fpret_fixedup;
5328 }
5329 #endif
5330 #if defined(__x86_64__)
5331 else if (msg->imp == (IMP)&objc_msgSend_fp2ret_fixup) {
5332 msg->imp = (IMP)&objc_msgSend_fp2ret_fixedup;
5333 }
5334 #endif
5335 else {
5336 // The ref may already have been fixed up, either by another thread
5337 // or by +initialize via lookUpMethod above.
5338 }
5339 }
5340
5341 return imp;
5342 }
5343
5344 // ! NO_FIXUP
5345 #endif
5346
5347
5348 #warning fixme delete after #4586306
5349 Class class_poseAs(Class imposter, Class original)
5350 {
5351 _objc_fatal("Don't call class_poseAs.");
5352 }
5353
5354
5355 // ProKit SPI
5356 static class_t *setSuperclass(class_t *cls, class_t *newSuper)
5357 {
5358 class_t *oldSuper;
5359
5360 rwlock_assert_writing(&runtimeLock);
5361
5362 oldSuper = cls->superclass;
5363 removeSubclass(oldSuper, cls);
5364 removeSubclass(oldSuper->isa, cls->isa);
5365
5366 cls->superclass = newSuper;
5367 cls->isa->superclass = newSuper->isa;
5368 addSubclass(newSuper, cls);
5369 addSubclass(newSuper->isa, cls->isa);
5370
5371 flushCaches(cls);
5372 flushCaches(cls->isa);
5373 flushVtables(cls);
5374 flushVtables(cls->isa);
5375
5376 return oldSuper;
5377 }
5378
5379
5380 Class class_setSuperclass(Class cls_gen, Class newSuper_gen)
5381 {
5382 class_t *cls = newcls(cls_gen);
5383 class_t *newSuper = newcls(newSuper_gen);
5384 class_t *oldSuper;
5385
5386 rwlock_write(&runtimeLock);
5387 oldSuper = setSuperclass(cls, newSuper);
5388 rwlock_unlock_write(&runtimeLock);
5389
5390 return (Class)oldSuper;
5391 }
5392
5393 #endif