1 /*
2 * Copyright (c) 2005-2008 Apple Inc. All Rights Reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24 /***********************************************************************
25 * objc-runtime-new.m
26 * Support for new-ABI classes and images.
27 **********************************************************************/
28
29 #if __OBJC2__
30
31 #include "objc-private.h"
32 #include "objc-runtime-new.h"
33 #include <objc/message.h>
34
35 #define newcls(cls) ((struct class_t *)cls)
36 #define newcat(cat) ((struct category_t *)cat)
37 #define newmethod(meth) ((struct method_t *)meth)
38 #define newivar(ivar) ((struct ivar_t *)ivar)
39 #define newcategory(cat) ((struct category_t *)cat)
40 #define newprotocol(p) ((struct protocol_t *)p)
41
42 #ifdef __LP64__
43 #define WORD_SHIFT 3UL
44 #define WORD_MASK 7UL
45 #else
46 #define WORD_SHIFT 2UL
47 #define WORD_MASK 3UL
48 #endif
49
50 static const char *getName(struct class_t *cls);
51 static uint32_t instanceSize(struct class_t *cls);
52 static BOOL isMetaClass(struct class_t *cls);
53 static struct class_t *getSuperclass(struct class_t *cls);
54 static void unload_class(class_t *cls, BOOL isMeta);
55 static class_t *setSuperclass(class_t *cls, class_t *newSuper);
56 static class_t *realizeClass(class_t *cls);
57 static void flushCaches(class_t *cls);
58 static void flushVtables(class_t *cls);
59 static method_t *getMethodNoSuper_nolock(struct class_t *cls, SEL sel);
60 static method_t *getMethod_nolock(class_t *cls, SEL sel);
61 static void changeInfo(class_t *cls, unsigned int set, unsigned int clear);
62 static IMP _method_getImplementation(method_t *m);
63
64
65 /***********************************************************************
66 * Lock management
67 * Every lock used anywhere must be managed here.
68 * Locks not managed here may cause gdb deadlocks.
69 **********************************************************************/
70 __private_extern__ rwlock_t runtimeLock = {0};
71 __private_extern__ rwlock_t selLock = {0};
72 __private_extern__ mutex_t cacheUpdateLock = MUTEX_INITIALIZER;
73 __private_extern__ recursive_mutex_t loadMethodLock = RECURSIVE_MUTEX_INITIALIZER;
74 static int debugger_runtimeLock;
75 static int debugger_selLock;
76 static int debugger_cacheUpdateLock;
77 static int debugger_loadMethodLock;
78 #define RDONLY 1
79 #define RDWR 2
80
81 __private_extern__ void lock_init(void)
82 {
83 rwlock_init(&selLock);
84 rwlock_init(&runtimeLock);
85 recursive_mutex_init(&loadMethodLock);
86 }
87
88
89 /***********************************************************************
90 * startDebuggerMode
91 * Attempt to acquire some locks for debugger mode.
92 * Returns 0 if debugger mode failed because too many locks are unavailable.
93 *
94 * Locks successfully acquired are held until endDebuggerMode().
95 * Locks not acquired are off-limits until endDebuggerMode(); any
96 * attempt to manipulate them will cause a trap.
97 * Locks not handled here may cause deadlocks in gdb.
98 **********************************************************************/
99 __private_extern__ int startDebuggerMode(void)
100 {
101 int result = DEBUGGER_FULL;
102
103 // runtimeLock is required (can't do much without it)
104 if (rwlock_try_write(&runtimeLock)) {
105 debugger_runtimeLock = RDWR;
106 } else if (rwlock_try_read(&runtimeLock)) {
107 debugger_runtimeLock = RDONLY;
108 result = DEBUGGER_PARTIAL;
109 } else {
110 return DEBUGGER_OFF;
111 }
112
113 // cacheUpdateLock is required (must not fail a necessary cache flush)
114 // must be AFTER runtimeLock to avoid lock inversion
115 if (mutex_try_lock(&cacheUpdateLock)) {
116 debugger_cacheUpdateLock = RDWR;
117 } else {
118 rwlock_unlock(&runtimeLock, debugger_runtimeLock);
119 debugger_runtimeLock = 0;
120 return DEBUGGER_OFF;
121 }
122
123 // selLock is optional
124 if (rwlock_try_write(&selLock)) {
125 debugger_selLock = RDWR;
126 } else if (rwlock_try_read(&selLock)) {
127 debugger_selLock = RDONLY;
128 result = DEBUGGER_PARTIAL;
129 } else {
130 debugger_selLock = 0;
131 result = DEBUGGER_PARTIAL;
132 }
133
134 // loadMethodLock is optional
135 if (recursive_mutex_try_lock(&loadMethodLock)) {
136 debugger_loadMethodLock = RDWR;
137 } else {
138 debugger_loadMethodLock = 0;
139 result = DEBUGGER_PARTIAL;
140 }
141
142 return result;
143 }
144
145 /***********************************************************************
146 * endDebuggerMode
147 * Relinquish locks acquired in startDebuggerMode().
148 **********************************************************************/
149 __private_extern__ void endDebuggerMode(void)
150 {
151 assert(debugger_runtimeLock != 0);
152
153 rwlock_unlock(&runtimeLock, debugger_runtimeLock);
154 debugger_runtimeLock = 0;
155
156 rwlock_unlock(&selLock, debugger_selLock);
157 debugger_selLock = 0;
158
159 assert(debugger_cacheUpdateLock == RDWR);
160 mutex_unlock(&cacheUpdateLock);
161 debugger_cacheUpdateLock = 0;
162
163 if (debugger_loadMethodLock) {
164 recursive_mutex_unlock(&loadMethodLock);
165 debugger_loadMethodLock = 0;
166 }
167 }
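
/***********************************************************************
* Usage sketch (illustrative; not part of the runtime):
* how a debugger-support callback would be expected to bracket its work
* with startDebuggerMode()/endDebuggerMode() and honor the result.
* inspect_runtime_state() is a hypothetical placeholder.
**********************************************************************/
#if 0  // illustrative only
static void debugger_callback_sketch(void)
{
    int mode = startDebuggerMode();
    if (mode == DEBUGGER_OFF) {
        // Required locks (runtimeLock, cacheUpdateLock) unavailable;
        // don't touch runtime structures at all.
        return;
    }
    // DEBUGGER_FULL: every managed lock was acquired.
    // DEBUGGER_PARTIAL: some optional locks are off-limits until
    // endDebuggerMode(); touching them would trap.
    inspect_runtime_state(mode == DEBUGGER_FULL);  // hypothetical helper
    endDebuggerMode();
}
#endif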
168
169 /***********************************************************************
170 * isManagedDuringDebugger
171 * Returns YES if the given lock is handled specially during debugger
172 * mode (i.e. debugger mode tries to acquire it).
173 **********************************************************************/
174 __private_extern__ BOOL isManagedDuringDebugger(void *lock)
175 {
176 if (lock == &selLock) return YES;
177 if (lock == &cacheUpdateLock) return YES;
178 if (lock == &runtimeLock) return YES;
179 if (lock == &loadMethodLock) return YES;
180 return NO;
181 }
182
183 /***********************************************************************
184 * isLockedDuringDebugger
185 * Returns YES if the given mutex was acquired by debugger mode.
186 * Locking a managed mutex during debugger mode causes a trap unless
187 * this returns YES.
188 **********************************************************************/
189 __private_extern__ BOOL isLockedDuringDebugger(mutex_t *lock)
190 {
191 assert(DebuggerMode);
192
193 if (lock == &cacheUpdateLock) return YES;
194 if (lock == (mutex_t *)&loadMethodLock) return YES;
195
196 return NO;
197 }
198
199 /***********************************************************************
200 * isReadingDuringDebugger
201 * Returns YES if the given rwlock was read-locked by debugger mode.
202 * Read-locking a managed rwlock during debugger mode causes a trap unless
203 * this returns YES.
204 **********************************************************************/
205 __private_extern__ BOOL isReadingDuringDebugger(rwlock_t *lock)
206 {
207 assert(DebuggerMode);
208
209 // read-lock is allowed even if debugger mode actually write-locked it
210 if (debugger_runtimeLock && lock == &runtimeLock) return YES;
211 if (debugger_selLock && lock == &selLock) return YES;
212
213 return NO;
214 }
215
216 /***********************************************************************
217 * isWritingDuringDebugger
218 * Returns YES if the given rwlock was write-locked by debugger mode.
219 * Write-locking a managed rwlock during debugger mode causes a trap unless
220 * this returns YES.
221 **********************************************************************/
222 __private_extern__ BOOL isWritingDuringDebugger(rwlock_t *lock)
223 {
224 assert(DebuggerMode);
225
226 if (debugger_runtimeLock == RDWR && lock == &runtimeLock) return YES;
227 if (debugger_selLock == RDWR && lock == &selLock) return YES;
228
229 return NO;
230 }
231
232
233 /***********************************************************************
234 * vtable dispatch
235 *
236 * Every class gets a vtable pointer. The vtable is an array of IMPs.
237 * The selectors represented in the vtable are the same for all classes
238 * (i.e. no class has a bigger or smaller vtable).
239 * Each vtable index has an associated trampoline which dispatches to
240 * the IMP at that index for the receiver class's vtable (after
241 * checking for NULL). Dispatch fixup uses these trampolines instead
242 * of objc_msgSend.
243 * Fragility: The vtable size and list of selectors are chosen at launch
244 * time. No compiler-generated code depends on any particular vtable
245 * configuration, or even the use of vtable dispatch at all.
246 * Memory size: If a class's vtable is identical to its superclass's
247 * (i.e. the class overrides none of the vtable selectors), then
248 * the class points directly to its superclass's vtable. This means
249 * selectors to be included in the vtable should be chosen so they are
250 * (1) frequently called, but (2) not too frequently overridden. In
251 * particular, -dealloc is a bad choice.
252 * Forwarding: If a class doesn't implement some vtable selector, that
253 * selector's IMP is set to objc_msgSend in that class's vtable.
254 * +initialize: Each class keeps the default vtable (which always
255 * redirects to objc_msgSend) until its +initialize is completed.
256 * Otherwise, the first message to a class could be a vtable dispatch,
257 * and the vtable trampoline doesn't include +initialize checking.
258 * Changes: Categories, addMethod, and setImplementation all force vtable
259 * reconstruction for the class and all of its subclasses, if the
260 * vtable selectors are affected.
261 **********************************************************************/
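
/***********************************************************************
* Illustrative sketch (not part of the runtime): the conceptual body of
* one objc_msgSend_vtableN trampoline described above. kIndex stands for
* the slot number burned into that particular trampoline; argument and
* selector forwarding are elided (the real trampolines are hand-written
* assembly and keep everything in registers).
**********************************************************************/
#if 0  // illustrative only
static id vtable_trampoline_sketch(id receiver, SEL _cmd)
{
    if (!receiver) return nil;                       // nil check
    struct class_t *isa = (struct class_t *)receiver->isa;
    IMP imp = isa->vtable[kIndex];  // objc_msgSend if cls doesn't implement it
    return imp(receiver, _cmd);     // tail-call the looked-up implementation
}
#endif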
262
263 #define X8(x) \
264 x, x, x, x, x, x, x, x
265 #define X64(x) \
266 X8(x), X8(x), X8(x), X8(x), X8(x), X8(x), X8(x), X8(x)
267 #define X128(x) \
268 X64(x), X64(x)
269
270 #define vtableMax 128
271
272 IMP _objc_empty_vtable[vtableMax] = {
273 X128(objc_msgSend)
274 };
275
276 #ifndef NO_VTABLE
277
278 // Trampoline descriptors for gdb.
279
280 objc_trampoline_header *gdb_objc_trampolines = NULL;
281
282 void gdb_objc_trampolines_changed(objc_trampoline_header *thdr) __attribute__((noinline));
283 void gdb_objc_trampolines_changed(objc_trampoline_header *thdr)
284 {
285 rwlock_assert_writing(&runtimeLock);
286 assert(thdr == gdb_objc_trampolines);
287
288 if (PrintVtables) {
289 _objc_inform("VTABLES: gdb_objc_trampolines_changed(%p)", thdr);
290 }
291 }
292
293 // fixme workaround for rdar://6667753
294 static void appendTrampolines(objc_trampoline_header *thdr) __attribute__((noinline));
295
296 static void appendTrampolines(objc_trampoline_header *thdr)
297 {
298 rwlock_assert_writing(&runtimeLock);
299 assert(thdr->next == NULL);
300
301 if (gdb_objc_trampolines != thdr->next) {
302 thdr->next = gdb_objc_trampolines;
303 }
304 gdb_objc_trampolines = thdr;
305
306 gdb_objc_trampolines_changed(thdr);
307 }
308
309 // Vtable management.
310
311 static size_t vtableStrlen;
312 static size_t vtableCount;
313 static SEL *vtableSelectors;
314 static IMP *vtableTrampolines;
315 static const char * const defaultVtable[] = {
316 "allocWithZone:",
317 "alloc",
318 "class",
319 "self",
320 "isKindOfClass:",
321 "respondsToSelector:",
322 "isFlipped",
323 "length",
324 "objectForKey:",
325 "count",
326 "objectAtIndex:",
327 "isEqualToString:",
328 "isEqual:",
329 "retain",
330 "release",
331 "autorelease",
332 };
333 static const char * const defaultVtableGC[] = {
334 "allocWithZone:",
335 "alloc",
336 "class",
337 "self",
338 "isKindOfClass:",
339 "respondsToSelector:",
340 "isFlipped",
341 "length",
342 "objectForKey:",
343 "count",
344 "objectAtIndex:",
345 "isEqualToString:",
346 "isEqual:",
347 "hash",
348 "addObject:",
349 "countByEnumeratingWithState:objects:count:",
350 };
351
352 extern id objc_msgSend_vtable0(id, SEL, ...);
353 extern id objc_msgSend_vtable1(id, SEL, ...);
354 extern id objc_msgSend_vtable2(id, SEL, ...);
355 extern id objc_msgSend_vtable3(id, SEL, ...);
356 extern id objc_msgSend_vtable4(id, SEL, ...);
357 extern id objc_msgSend_vtable5(id, SEL, ...);
358 extern id objc_msgSend_vtable6(id, SEL, ...);
359 extern id objc_msgSend_vtable7(id, SEL, ...);
360 extern id objc_msgSend_vtable8(id, SEL, ...);
361 extern id objc_msgSend_vtable9(id, SEL, ...);
362 extern id objc_msgSend_vtable10(id, SEL, ...);
363 extern id objc_msgSend_vtable11(id, SEL, ...);
364 extern id objc_msgSend_vtable12(id, SEL, ...);
365 extern id objc_msgSend_vtable13(id, SEL, ...);
366 extern id objc_msgSend_vtable14(id, SEL, ...);
367 extern id objc_msgSend_vtable15(id, SEL, ...);
368
369 static IMP const defaultVtableTrampolines[] = {
370 objc_msgSend_vtable0,
371 objc_msgSend_vtable1,
372 objc_msgSend_vtable2,
373 objc_msgSend_vtable3,
374 objc_msgSend_vtable4,
375 objc_msgSend_vtable5,
376 objc_msgSend_vtable6,
377 objc_msgSend_vtable7,
378 objc_msgSend_vtable8,
379 objc_msgSend_vtable9,
380 objc_msgSend_vtable10,
381 objc_msgSend_vtable11,
382 objc_msgSend_vtable12,
383 objc_msgSend_vtable13,
384 objc_msgSend_vtable14,
385 objc_msgSend_vtable15,
386 };
387 extern objc_trampoline_header defaultVtableTrampolineDescriptors;
388
389 static void check_vtable_size(void) __unused;
390 static void check_vtable_size(void)
391 {
392 // Fail to compile if vtable sizes don't match.
393 int c1[sizeof(defaultVtableTrampolines)-sizeof(defaultVtable)] __unused;
394 int c2[sizeof(defaultVtable)-sizeof(defaultVtableTrampolines)] __unused;
395 int c3[sizeof(defaultVtableTrampolines)-sizeof(defaultVtableGC)] __unused;
396 int c4[sizeof(defaultVtableGC)-sizeof(defaultVtableTrampolines)] __unused;
397
398 // Fail to compile if vtableMax is too small
399 int c5[vtableMax - sizeof(defaultVtable)] __unused;
400 int c6[vtableMax - sizeof(defaultVtableGC)] __unused;
401 }
402
403 /*
404 x86_64
405
406 monomorphic (self rdi, sel* rsi, temp r10 and r11) {
407 test %rdi, %rdi
408 jeq returnZero // nil check
409 movq 8(%rsi), %rsi // load _cmd (fixme schedule)
410 movq $xxxx, %r10
411 cmp 0(%rdi), %r10 // isa check
412 jeq imp // fixme long branches
413 movq $yyyy, %r10
414 cmp 0(%rdi), %r10 // fixme load rdi once for multiple isas
415 jeq imp2 // fixme long branches
416 jmp objc_msgSend // fixme long branches
417 }
418
419 */
420 extern uint8_t vtable_prototype;
421 extern uint8_t vtable_ignored;
422 extern int vtable_prototype_size;
423 extern int vtable_prototype_index_offset;
424 static size_t makeVtableTrampoline(uint8_t *dst, size_t index)
425 {
426 // copy boilerplate
427 memcpy(dst, &vtable_prototype, vtable_prototype_size);
428
429 // insert index
430 #if defined(__x86_64__)
431 uint16_t *p = (uint16_t *)(dst + vtable_prototype_index_offset + 3);
432 if (*p != 0x7fff) _objc_fatal("vtable_prototype busted");
433 *p = index * 8;
434 #else
435 # warning unknown architecture
436 #endif
437
438 return vtable_prototype_size;
439 }
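
/*
 * Worked example of the patch above (x86_64): the prototype trampoline
 * carries a 0x7fff placeholder in its index operand, and makeVtableTrampoline
 * replaces it with index * 8, the byte offset of that slot in the IMP array
 * (8-byte pointers). The trampoline for slot 13 therefore loads the IMP at
 * offset 13 * 8 = 104 from the class's vtable pointer.
 */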
440
441
442 static void initVtables(void)
443 {
444 if (DisableVtables) {
445 if (PrintVtables) {
446 _objc_inform("VTABLES: vtable dispatch disabled by OBJC_DISABLE_VTABLES");
447 }
448 vtableCount = 0;
449 vtableSelectors = NULL;
450 vtableTrampolines = NULL;
451 return;
452 }
453
454 const char * const *names;
455 size_t i;
456
457 if (UseGC) {
458 names = defaultVtableGC;
459 vtableCount = sizeof(defaultVtableGC) / sizeof(defaultVtableGC[0]);
460 } else {
461 names = defaultVtable;
462 vtableCount = sizeof(defaultVtable) / sizeof(defaultVtable[0]);
463 }
464 if (vtableCount > vtableMax) vtableCount = vtableMax;
465
466 vtableSelectors = _malloc_internal(vtableCount * sizeof(SEL));
467 vtableTrampolines = _malloc_internal(vtableCount * sizeof(IMP));
468
469 // Built-in trampolines and their descriptors
470
471 size_t defaultVtableTrampolineCount =
472 sizeof(defaultVtableTrampolines) / sizeof(defaultVtableTrampolines[0]);
473 #ifndef NDEBUG
474 // debug: use generated code for 3/4 of the table
475 defaultVtableTrampolineCount /= 4;
476 #endif
477
478 for (i = 0; i < defaultVtableTrampolineCount && i < vtableCount; i++) {
479 vtableSelectors[i] = sel_registerName(names[i]);
480 vtableTrampolines[i] = defaultVtableTrampolines[i];
481 }
482 appendTrampolines(&defaultVtableTrampolineDescriptors);
483
484
485 // Generated trampolines and their descriptors
486
487 if (vtableCount > defaultVtableTrampolineCount) {
488 // Memory for trampoline code
489 size_t generatedCount =
490 vtableCount - defaultVtableTrampolineCount;
491
492 const int align = 16;
493 size_t codeSize =
494 round_page(sizeof(objc_trampoline_header) + align +
495 generatedCount * (sizeof(objc_trampoline_descriptor)
496 + vtable_prototype_size + align));
497 void *codeAddr = mmap(0, codeSize, PROT_READ|PROT_WRITE,
498 MAP_PRIVATE|MAP_ANON,
499 VM_MAKE_TAG(VM_MEMORY_OBJC_DISPATCHERS), 0);
500 uint8_t *t = (uint8_t *)codeAddr;
501
502 // Trampoline header
503 objc_trampoline_header *thdr = (objc_trampoline_header *)t;
504 thdr->headerSize = sizeof(objc_trampoline_header);
505 thdr->descSize = sizeof(objc_trampoline_descriptor);
506 thdr->descCount = (uint32_t)generatedCount;
507 thdr->next = NULL;
508
509 // Trampoline descriptors
510 objc_trampoline_descriptor *tdesc = (objc_trampoline_descriptor *)(thdr+1);
511 t = (uint8_t *)&tdesc[generatedCount];
512 t += align - ((uintptr_t)t % align);
513
514 // Dispatch code
515 size_t tdi;
516 for (i = defaultVtableTrampolineCount, tdi = 0;
517 i < vtableCount;
518 i++, tdi++)
519 {
520 vtableSelectors[i] = sel_registerName(names[i]);
521 if (vtableSelectors[i] == (SEL)kIgnore) {
522 vtableTrampolines[i] = (IMP)&vtable_ignored;
523 tdesc[tdi].offset = 0;
524 tdesc[tdi].flags = 0;
525 } else {
526 vtableTrampolines[i] = (IMP)t;
527 tdesc[tdi].offset =
528 (uint32_t)((uintptr_t)t - (uintptr_t)&tdesc[tdi]);
529 tdesc[tdi].flags =
530 OBJC_TRAMPOLINE_MESSAGE|OBJC_TRAMPOLINE_VTABLE;
531
532 t += makeVtableTrampoline(t, i);
533 t += align - ((uintptr_t)t % align);
534 }
535 }
536
537 appendTrampolines(thdr);
538 sys_icache_invalidate(codeAddr, codeSize);
539 mprotect(codeAddr, codeSize, PROT_READ|PROT_EXEC);
540 }
541
542
543 if (PrintVtables) {
544 for (i = 0; i < vtableCount; i++) {
545 _objc_inform("VTABLES: vtable[%zu] %p %s",
546 i, vtableTrampolines[i],
547 sel_getName(vtableSelectors[i]));
548 }
549 }
550
551 if (PrintVtableImages) {
552 _objc_inform("VTABLE IMAGES: '#' implemented by class");
553 _objc_inform("VTABLE IMAGES: '-' inherited from superclass");
554 _objc_inform("VTABLE IMAGES: ' ' not implemented");
555 for (i = 0; i <= vtableCount; i++) {
556 char spaces[vtableCount+1+1];
557 size_t j;
558 for (j = 0; j < i; j++) {
559 spaces[j] = '|';
560 }
561 spaces[j] = '\0';
562 _objc_inform("VTABLE IMAGES: %s%s", spaces,
563 i<vtableCount ? sel_getName(vtableSelectors[i]) : "");
564 }
565 }
566
567 if (PrintVtables || PrintVtableImages) {
568 vtableStrlen = 0;
569 for (i = 0; i < vtableCount; i++) {
570 vtableStrlen += strlen(sel_getName(vtableSelectors[i]));
571 }
572 }
573 }
574
575
576 static int vtable_getIndex(SEL sel)
577 {
578 int i;
579 for (i = 0; i < vtableCount; i++) {
580 if (vtableSelectors[i] == sel) return i;
581 }
582 return -1;
583 }
584
585 static BOOL vtable_containsSelector(SEL sel)
586 {
587 return (vtable_getIndex(sel) < 0) ? NO : YES;
588 }
589
590 static void printVtableOverrides(class_t *cls, class_t *supercls)
591 {
592 char overrideMap[vtableCount+1];
593 int i;
594
595 if (supercls) {
596 size_t overridesBufferSize = vtableStrlen + 2*vtableCount + 1;
597 char *overrides =
598 _calloc_internal(overridesBufferSize, 1);
599 for (i = 0; i < vtableCount; i++) {
600 if (vtableSelectors[i] == (SEL)kIgnore) {
601 overrideMap[i] = '-';
602 continue;
603 }
604 if (getMethodNoSuper_nolock(cls, vtableSelectors[i])) {
605 strlcat(overrides, sel_getName(vtableSelectors[i]), overridesBufferSize);
606 strlcat(overrides, ", ", overridesBufferSize);
607 overrideMap[i] = '#';
608 } else if (getMethod_nolock(cls, vtableSelectors[i])) {
609 overrideMap[i] = '-';
610 } else {
611 overrideMap[i] = ' ';
612 }
613 }
614 if (PrintVtables) {
615 _objc_inform("VTABLES: %s%s implements %s",
616 getName(cls), isMetaClass(cls) ? "(meta)" : "",
617 overrides);
618 }
619 _free_internal(overrides);
620 }
621 else {
622 for (i = 0; i < vtableCount; i++) {
623 overrideMap[i] = '#';
624 }
625 }
626
627 if (PrintVtableImages) {
628 overrideMap[vtableCount] = '\0';
629 _objc_inform("VTABLE IMAGES: %s %s%s", overrideMap,
630 getName(cls), isMetaClass(cls) ? "(meta)" : "");
631 }
632 }
633
634 /***********************************************************************
635 * updateVtable
636 * Rebuilds vtable for cls, using superclass's vtable if appropriate.
637 * Assumes superclass's vtable is up to date.
638 * Does nothing to subclass vtables.
639 * Locking: runtimeLock must be held by the caller.
640 **********************************************************************/
641 static void updateVtable(class_t *cls, BOOL force)
642 {
643 rwlock_assert_writing(&runtimeLock);
644
645 // Keep default vtable until +initialize is complete.
646 // Default vtable redirects to objc_msgSend, which
647 // enforces +initialize locking.
648 if (!force && !_class_isInitialized((Class)cls)) {
649 /*
650 if (PrintVtables) {
651 _objc_inform("VTABLES: KEEPING DEFAULT vtable for "
652 "uninitialized class %s%s",
653 getName(cls), isMetaClass(cls) ? "(meta)" : "");
654 }
655 */
656 return;
657 }
658
659 // Decide whether this class can share its superclass's vtable.
660
661 struct class_t *supercls = getSuperclass(cls);
662 BOOL needVtable = NO;
663 int i;
664 if (!supercls) {
665 // Root classes always need a vtable
666 needVtable = YES;
667 }
668 else if (cls->data->flags & RW_SPECIALIZED_VTABLE) {
669 // Once you have your own vtable, you never go back
670 needVtable = YES;
671 }
672 else {
673 for (i = 0; i < vtableCount; i++) {
674 if (vtableSelectors[i] == (SEL)kIgnore) continue;
675 method_t *m = getMethodNoSuper_nolock(cls, vtableSelectors[i]);
676 // assume any local implementation differs from super's
677 if (m) {
678 needVtable = YES;
679 break;
680 }
681 }
682 }
683
684 // Build a vtable for this class, or not.
685
686 if (!needVtable) {
687 if (PrintVtables) {
688 _objc_inform("VTABLES: USING SUPERCLASS vtable for class %s%s",
689 getName(cls), isMetaClass(cls) ? "(meta)" : "");
690 }
691 cls->vtable = supercls->vtable;
692 }
693 else {
694 if (PrintVtables) {
695 _objc_inform("VTABLES: %s vtable for class %s%s",
696 (cls->data->flags & RW_SPECIALIZED_VTABLE) ?
697 "UPDATING SPECIALIZED" : "CREATING SPECIALIZED",
698 getName(cls), isMetaClass(cls) ? "(meta)" : "");
699 }
700 if (PrintVtables || PrintVtableImages) {
701 printVtableOverrides(cls, supercls);
702 }
703
704 IMP *new_vtable = cls->vtable;
705 IMP *super_vtable = supercls ? supercls->vtable : _objc_empty_vtable;
706 // fixme use msgForward (instead of msgSend from empty vtable) ?
707
708 if (cls->data->flags & RW_SPECIALIZED_VTABLE) {
709 // update cls->vtable in place
710 new_vtable = cls->vtable;
711 assert(new_vtable != _objc_empty_vtable);
712 } else {
713 // make new vtable
714 new_vtable = malloc(vtableCount * sizeof(IMP));
715 changeInfo(cls, RW_SPECIALIZED_VTABLE, 0);
716 }
717
718 for (i = 0; i < vtableCount; i++) {
719 if (vtableSelectors[i] == (SEL)kIgnore) {
720 new_vtable[i] = (IMP)&vtable_ignored;
721 } else {
722 method_t *m = getMethodNoSuper_nolock(cls, vtableSelectors[i]);
723 if (m) new_vtable[i] = _method_getImplementation(m);
724 else new_vtable[i] = super_vtable[i];
725 }
726 }
727
728 if (cls->vtable != new_vtable) {
729 // don't let other threads see uninitialized parts of new_vtable
730 OSMemoryBarrier();
731 cls->vtable = new_vtable;
732 }
733 }
734 }
735
736 // ! NO_VTABLE
737 #else
738 // NO_VTABLE
739
740 static void initVtables(void)
741 {
742 if (PrintVtables) {
743 _objc_inform("VTABLES: no vtables on this architecture");
744 }
745 }
746
747 static BOOL vtable_containsSelector(SEL sel)
748 {
749 return NO;
750 }
751
752 static void updateVtable(class_t *cls, BOOL force)
753 {
754 }
755
756 // NO_VTABLE
757 #endif
758
759 typedef struct {
760 category_t *cat;
761 BOOL fromBundle;
762 } category_pair_t;
763
764 typedef struct {
765 uint32_t count;
766 category_pair_t list[0]; // variable-size
767 } category_list;
768
769 #define FOREACH_METHOD_LIST(_mlist, _cls, code) \
770 do { \
771 const method_list_t *_mlist; \
772 if (_cls->data->methods) { \
773 method_list_t **_mlistp; \
774 for (_mlistp = _cls->data->methods; *_mlistp; _mlistp++) { \
775 _mlist = *_mlistp; \
776 code \
777 } \
778 } \
779 } while (0)
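
/***********************************************************************
* Usage sketch (illustrative; not part of the runtime):
* walking every attached method list of a realized class with
* FOREACH_METHOD_LIST, and every method within each list using
* method_list_nth() (defined later in this file). Lists attached to a
* realized class are already fixed up, so meth->name is a SEL.
**********************************************************************/
#if 0  // illustrative only
static void dump_methods_sketch(struct class_t *cls)
{
    FOREACH_METHOD_LIST(mlist, cls, {
        uint32_t m;
        for (m = 0; m < mlist->count; m++) {
            method_t *meth = method_list_nth(mlist, m);
            _objc_inform("%s", sel_getName(meth->name));
        }
    });
}
#endif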
780
781
782 // fixme don't chain property lists
783 typedef struct chained_property_list {
784 struct chained_property_list *next;
785 uint32_t count;
786 struct objc_property list[0]; // variable-size
787 } chained_property_list;
788
789 /*
790 Low two bits of mlist->entsize are used as the fixed-up marker.
791 PREOPTIMIZED VERSION:
792 Fixed-up method lists get entsize&3 == 3.
793 dyld shared cache sets this for method lists it preoptimizes.
794 UN-PREOPTIMIZED VERSION:
795 Fixed-up method lists get entsize&3 == 1.
796 dyld shared cache uses 3, but those aren't trusted.
797 */
798
799 static uint32_t fixed_up_method_list = 3;
800
801 __private_extern__ void
802 disableSelectorPreoptimization(void)
803 {
804 fixed_up_method_list = 1;
805 }
806
807 static BOOL isMethodListFixedUp(const method_list_t *mlist)
808 {
809 return (mlist->entsize_NEVER_USE & 3) == fixed_up_method_list;
810 }
811
812 static void setMethodListFixedUp(method_list_t *mlist)
813 {
814 rwlock_assert_writing(&runtimeLock);
815 assert(!isMethodListFixedUp(mlist));
816 mlist->entsize_NEVER_USE = (mlist->entsize_NEVER_USE & ~3) | fixed_up_method_list;
817 }
818
819 /*
820 static size_t chained_property_list_size(const chained_property_list *plist)
821 {
822 return sizeof(chained_property_list) +
823 plist->count * sizeof(struct objc_property);
824 }
825
826 static size_t protocol_list_size(const protocol_list_t *plist)
827 {
828 return sizeof(protocol_list_t) + plist->count * sizeof(protocol_t *);
829 }
830 */
831
832 // low 2 bits used by dyld shared cache as fixed-up markers (see above)
833 static uint32_t method_list_entsize(const method_list_t *mlist)
834 {
835 return mlist->entsize_NEVER_USE & ~(uint32_t)3;
836 }
837
838 static size_t method_list_size(const method_list_t *mlist)
839 {
840 return sizeof(method_list_t) + (mlist->count-1)*method_list_entsize(mlist);
841 }
842
843 static method_t *method_list_nth(const method_list_t *mlist, uint32_t i)
844 {
845 return (method_t *)(i*method_list_entsize(mlist) + (char *)&mlist->first);
846 }
847
848
849 static size_t ivar_list_size(const ivar_list_t *ilist)
850 {
851 return sizeof(ivar_list_t) + (ilist->count-1) * ilist->entsize;
852 }
853
854 static ivar_t *ivar_list_nth(const ivar_list_t *ilist, uint32_t i)
855 {
856 return (ivar_t *)(i*ilist->entsize + (char *)&ilist->first);
857 }
858
859
860 static method_list_t *cat_method_list(const category_t *cat, BOOL isMeta)
861 {
862 if (!cat) return NULL;
863
864 if (isMeta) return cat->classMethods;
865 else return cat->instanceMethods;
866 }
867
868 static uint32_t cat_method_count(const category_t *cat, BOOL isMeta)
869 {
870 method_list_t *cmlist = cat_method_list(cat, isMeta);
871 return cmlist ? cmlist->count : 0;
872 }
873
874 static method_t *cat_method_nth(const category_t *cat, BOOL isMeta, uint32_t i)
875 {
876 method_list_t *cmlist = cat_method_list(cat, isMeta);
877 if (!cmlist) return NULL;
878
879 return method_list_nth(cmlist, i);
880 }
881
882
883 // part of ivar_t, with non-deprecated alignment
884 typedef struct {
885 uintptr_t *offset;
886 const char *name;
887 const char *type;
888 uint32_t alignment;
889 } ivar_alignment_t;
890
891 static uint32_t ivar_alignment(const ivar_t *ivar)
892 {
893 uint32_t alignment = ((ivar_alignment_t *)ivar)->alignment;
894 if (alignment == (uint32_t)-1) alignment = (uint32_t)WORD_SHIFT;
895 return 1<<alignment;
896 }
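
/*
 * Worked example: the alignment field stores log2 of the byte alignment.
 * A stored value of 3 yields 1<<3 = 8-byte alignment; the sentinel
 * (uint32_t)-1 means "use the default word alignment", i.e. 1<<WORD_SHIFT
 * (8 bytes on LP64, 4 bytes otherwise).
 */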
897
898
899 static void try_free(const void *p)
900 {
901 if (p && malloc_size(p)) free((void *)p);
902 }
903
904
905 /***********************************************************************
906 * make_ro_writeable
907 * Reallocates rw->ro if necessary to make it writeable.
908 * Locking: runtimeLock must be held by the caller.
909 **********************************************************************/
910 static class_ro_t *make_ro_writeable(class_rw_t *rw)
911 {
912 rwlock_assert_writing(&runtimeLock);
913
914 if (rw->flags & RW_COPIED_RO) {
915 // already writeable, do nothing
916 } else {
917 class_ro_t *ro = _memdup_internal(rw->ro, sizeof(*rw->ro));
918 rw->ro = ro;
919 rw->flags |= RW_COPIED_RO;
920 }
921 return (class_ro_t *)rw->ro;
922 }
923
924
925 /***********************************************************************
926 * unattachedCategories
927 * Returns the class => categories map of unattached categories.
928 * Locking: runtimeLock must be held by the caller.
929 **********************************************************************/
930 static NXMapTable *unattachedCategories(void)
931 {
932 rwlock_assert_writing(&runtimeLock);
933
934 static NXMapTable *category_map = NULL;
935
936 if (category_map) return category_map;
937
938 // fixme initial map size
939 category_map = NXCreateMapTableFromZone(NXPtrValueMapPrototype, 16,
940 _objc_internal_zone());
941
942 return category_map;
943 }
944
945
946 /***********************************************************************
947 * addUnattachedCategoryForClass
948 * Records an unattached category.
949 * Locking: runtimeLock must be held by the caller.
950 **********************************************************************/
951 static void addUnattachedCategoryForClass(category_t *cat, class_t *cls,
952 header_info *catHeader)
953 {
954 rwlock_assert_writing(&runtimeLock);
955
956 BOOL catFromBundle = (catHeader->mhdr->filetype == MH_BUNDLE) ? YES: NO;
957
958 // DO NOT use cat->cls!
959 // cls may be cat->cls->isa, or cat->cls may have been remapped.
960 NXMapTable *cats = unattachedCategories();
961 category_list *list;
962
963 list = NXMapGet(cats, cls);
964 if (!list) {
965 list = _calloc_internal(sizeof(*list) + sizeof(list->list[0]), 1);
966 } else {
967 list = _realloc_internal(list, sizeof(*list) + sizeof(list->list[0]) * (list->count + 1));
968 }
969 list->list[list->count++] = (category_pair_t){cat, catFromBundle};
970 NXMapInsert(cats, cls, list);
971 }
972
973
974 /***********************************************************************
975 * removeUnattachedCategoryForClass
976 * Removes an unattached category.
977 * Locking: runtimeLock must be held by the caller.
978 **********************************************************************/
979 static void removeUnattachedCategoryForClass(category_t *cat, class_t *cls)
980 {
981 rwlock_assert_writing(&runtimeLock);
982
983 // DO NOT use cat->cls!
984 // cls may be cat->cls->isa, or cat->cls may have been remapped.
985 NXMapTable *cats = unattachedCategories();
986 category_list *list;
987
988 list = NXMapGet(cats, cls);
989 if (!list) return;
990
991 uint32_t i;
992 for (i = 0; i < list->count; i++) {
993 if (list->list[i].cat == cat) {
994 // shift entries to preserve list order
995 memmove(&list->list[i], &list->list[i+1],
996 (list->count-i-1) * sizeof(list->list[i]));
997 list->count--;
998 return;
999 }
1000 }
1001 }
1002
1003
1004 /***********************************************************************
1005 * unattachedCategoriesForClass
1006 * Returns the list of unattached categories for a class, and
1007 * removes them from the map.
1008 * The result must be freed by the caller.
1009 * Locking: runtimeLock must be held by the caller.
1010 **********************************************************************/
1011 static category_list *unattachedCategoriesForClass(class_t *cls)
1012 {
1013 rwlock_assert_writing(&runtimeLock);
1014 return NXMapRemove(unattachedCategories(), cls);
1015 }
1016
1017
1018 /***********************************************************************
1019 * isRealized
1020 * Returns YES if class cls has been realized.
1021 * Locking: To prevent concurrent realization, hold runtimeLock.
1022 **********************************************************************/
1023 static BOOL isRealized(class_t *cls)
1024 {
1025 return (cls->data->flags & RW_REALIZED) ? YES : NO;
1026 }
1027
1028
1029 /***********************************************************************
1030 * isFuture
1031 * Returns YES if class cls is an unrealized future class.
1032 * Locking: To prevent concurrent realization, hold runtimeLock.
1033 **********************************************************************/
1034 static BOOL isFuture(class_t *cls)
1035 {
1036 return (cls->data->flags & RW_FUTURE) ? YES : NO;
1037 }
1038
1039
1040 /***********************************************************************
1041 * printReplacements
1042 * Implementation of PrintReplacedMethods / OBJC_PRINT_REPLACED_METHODS.
1043 * Warn about methods from cats that override other methods in cats or cls.
1044 * Assumes no methods from cats have been added to cls yet.
1045 **********************************************************************/
1046 static void printReplacements(class_t *cls, category_list *cats)
1047 {
1048 uint32_t c;
1049 BOOL isMeta = isMetaClass(cls);
1050
1051 if (!cats) return;
1052
1053 // Newest categories are LAST in cats
1054 // Later categories override earlier ones.
1055 for (c = 0; c < cats->count; c++) {
1056 category_t *cat = cats->list[c].cat;
1057 uint32_t cmCount = cat_method_count(cat, isMeta);
1058 uint32_t m;
1059 for (m = 0; m < cmCount; m++) {
1060 uint32_t c2, m2;
1061 method_t *meth2 = NULL;
1062 method_t *meth = cat_method_nth(cat, isMeta, m);
1063 SEL s = sel_registerName((const char *)meth->name);
1064
1065 // Don't warn about GC-ignored selectors
1066 if (s == (SEL)kIgnore) continue;
1067
1068 // Look for method in earlier categories
1069 for (c2 = 0; c2 < c; c2++) {
1070 category_t *cat2 = cats->list[c2].cat;
1071 uint32_t cm2Count = cat_method_count(cat2, isMeta);
1072 for (m2 = 0; m2 < cm2Count; m2++) {
1073 meth2 = cat_method_nth(cat2, isMeta, m2);
1074 SEL s2 = sel_registerName((const char *)meth2->name);
1075 if (s == s2) goto whine;
1076 }
1077 }
1078
1079 // Look for method in cls
1080 FOREACH_METHOD_LIST(mlist, cls, {
1081 for (m2 = 0; m2 < mlist->count; m2++) {
1082 meth2 = method_list_nth(mlist, m2);
1083 SEL s2 = sel_registerName((const char *)meth2->name);
1084 if (s == s2) goto whine;
1085 }
1086 });
1087
1088 // Didn't find any override.
1089 continue;
1090
1091 whine:
1092 // Found an override.
1093 logReplacedMethod(getName(cls), s, isMetaClass(cls), cat->name,
1094 _method_getImplementation(meth2),
1095 _method_getImplementation(meth));
1096 }
1097 }
1098 }
1099
1100
1101 static BOOL isBundleClass(class_t *cls)
1102 {
1103 return (cls->data->ro->flags & RO_FROM_BUNDLE) ? YES : NO;
1104 }
1105
1106
1107 static void
1108 fixupMethodList(method_list_t *mlist, BOOL bundleCopy)
1109 {
1110 assert(!isMethodListFixedUp(mlist));
1111
1112 // fixme lock less in attachMethodLists ?
1113 sel_lock();
1114
1115 uint32_t m;
1116 for (m = 0; m < mlist->count; m++) {
1117 method_t *meth = method_list_nth(mlist, m);
1118 SEL sel = sel_registerNameNoLock((const char *)meth->name, bundleCopy);
1119 meth->name = sel;
1120
1121 if (sel == (SEL)kIgnore) {
1122 meth->imp = (IMP)&_objc_ignored_method;
1123 }
1124 }
1125
1126 sel_unlock();
1127
1128 setMethodListFixedUp(mlist);
1129 }
1130
1131 static void
1132 attachMethodLists(class_t *cls, method_list_t **lists, int count,
1133 BOOL methodsFromBundle, BOOL *outVtablesAffected)
1134 {
1135 rwlock_assert_writing(&runtimeLock);
1136
1137 BOOL vtablesAffected = NO;
1138 size_t listsSize = count * sizeof(*lists);
1139
1140 // Create or extend method list array
1141 // Leave `count` empty slots at the start of the array to be filled below.
1142
1143 if (!cls->data->methods) {
1144 // no bonus method lists yet
1145 cls->data->methods = _calloc_internal(1 + count, sizeof(*lists));
1146 } else {
1147 size_t oldSize = malloc_size(cls->data->methods);
1148 cls->data->methods =
1149 _realloc_internal(cls->data->methods, oldSize + listsSize);
1150 memmove(cls->data->methods + count, cls->data->methods, oldSize);
1151 }
1152
1153 // Add method lists to array.
1154 // Reallocate un-fixed method lists.
1155
1156 int i;
1157 for (i = 0; i < count; i++) {
1158 method_list_t *mlist = lists[i];
1159 if (!mlist) continue;
1160
1161 // Fixup selectors if necessary
1162 if (!isMethodListFixedUp(mlist)) {
1163 mlist = _memdup_internal(mlist, method_list_size(mlist));
1164 fixupMethodList(mlist, methodsFromBundle);
1165 }
1166
1167 // Scan for vtable updates
1168 if (outVtablesAffected && !vtablesAffected) {
1169 uint32_t m;
1170 for (m = 0; m < mlist->count; m++) {
1171 SEL sel = method_list_nth(mlist, m)->name;
1172 if (vtable_containsSelector(sel)) vtablesAffected = YES;
1173 }
1174 }
1175
1176 // Fill method list array
1177 cls->data->methods[i] = mlist;
1178 }
1179
1180 if (outVtablesAffected) *outVtablesAffected = vtablesAffected;
1181 }
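
/*
 * Layout example for the array maintained above (illustrative): starting
 * from [base, NULL], attaching two category method lists A (newest) and B
 * shifts the existing contents right by `count` slots and fills the front,
 * giving [A, B, base, NULL]. Method search walks this array front to back,
 * so later categories shadow earlier ones and the base implementation.
 */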
1182
1183 static void
1184 attachCategoryMethods(class_t *cls, category_list *cats,
1185 BOOL *outVtablesAffected)
1186 {
1187 if (!cats) return;
1188 if (PrintReplacedMethods) printReplacements(cls, cats);
1189
1190 BOOL isMeta = isMetaClass(cls);
1191 method_list_t **mlists = _malloc_internal(cats->count * sizeof(*mlists));
1192
1193 // Count backwards through cats to get newest categories first
1194 int mcount = 0;
1195 int i = cats->count;
1196 BOOL fromBundle = NO;
1197 while (i--) {
1198 method_list_t *mlist = cat_method_list(cats->list[i].cat, isMeta);
1199 if (mlist) {
1200 mlists[mcount++] = mlist;
1201 fromBundle |= cats->list[i].fromBundle;
1202 }
1203 }
1204
1205 attachMethodLists(cls, mlists, mcount, fromBundle, outVtablesAffected);
1206
1207 _free_internal(mlists);
1208
1209 }
1210
1211
1212 static chained_property_list *
1213 buildPropertyList(const struct objc_property_list *plist, category_list *cats, BOOL isMeta)
1214 {
1215 // Do NOT use cat->cls! It may have been remapped.
1216 chained_property_list *newlist;
1217 uint32_t count = 0;
1218 uint32_t p, c;
1219
1220 // Count properties in all lists.
1221 if (plist) count = plist->count;
1222 if (cats) {
1223 for (c = 0; c < cats->count; c++) {
1224 category_t *cat = cats->list[c].cat;
1225 /*
1226 if (isMeta && cat->classProperties) {
1227 count += cat->classProperties->count;
1228 }
1229 else*/
1230 if (!isMeta && cat->instanceProperties) {
1231 count += cat->instanceProperties->count;
1232 }
1233 }
1234 }
1235
1236 if (count == 0) return NULL;
1237
1238 // Allocate new list.
1239 newlist = _malloc_internal(sizeof(*newlist) + count * sizeof(struct objc_property));
1240 newlist->count = 0;
1241 newlist->next = NULL;
1242
1243 // Copy properties; newest categories first, then ordinary properties
1244 if (cats) {
1245 c = cats->count;
1246 while (c--) {
1247 struct objc_property_list *cplist;
1248 category_t *cat = cats->list[c].cat;
1249 /*
1250 if (isMeta) {
1251 cplist = cat->classProperties;
1252 } else */
1253 {
1254 cplist = cat->instanceProperties;
1255 }
1256 if (cplist) {
1257 for (p = 0; p < cplist->count; p++) {
1258 newlist->list[newlist->count++] =
1259 *property_list_nth(cplist, p);
1260 }
1261 }
1262 }
1263 }
1264 if (plist) {
1265 for (p = 0; p < plist->count; p++) {
1266 newlist->list[newlist->count++] = *property_list_nth(plist, p);
1267 }
1268 }
1269
1270 assert(newlist->count == count);
1271
1272 return newlist;
1273 }
1274
1275
1276 static protocol_list_t **
1277 buildProtocolList(category_list *cats, struct protocol_list_t *base,
1278 struct protocol_list_t **protos)
1279 {
1280 // Do NOT use cat->cls! It may have been remapped.
1281 struct protocol_list_t **p, **newp;
1282 struct protocol_list_t **newprotos;
1283 int count = 0;
1284 int i;
1285
1286 // count protocol list in base
1287 if (base) count++;
1288
1289 // count protocol lists in cats
1290 if (cats) for (i = 0; i < cats->count; i++) {
1291 category_t *cat = cats->list[i].cat;
1292 if (cat->protocols) count++;
1293 }
1294
1295 // no base or category protocols? return existing protocols unchanged
1296 if (count == 0) return protos;
1297
1298 // count protocol lists in protos
1299 for (p = protos; p && *p; p++) {
1300 count++;
1301 }
1302
1303 if (count == 0) return NULL;
1304
1305 newprotos = (struct protocol_list_t **)
1306 _malloc_internal((count+1) * sizeof(struct protocol_list_t *));
1307 newp = newprotos;
1308
1309 if (base) {
1310 *newp++ = base;
1311 }
1312
1313 for (p = protos; p && *p; p++) {
1314 *newp++ = *p;
1315 }
1316
1317 if (cats) for (i = 0; i < cats->count; i++) {
1318 category_t *cat = cats->list[i].cat;
1319 if (cat->protocols) {
1320 *newp++ = cat->protocols;
1321 }
1322 }
1323
1324 *newp = NULL;
1325
1326 return newprotos;
1327 }
1328
1329
1330 /***********************************************************************
1331 * methodizeClass
1332 * Fixes up cls's method list, protocol list, and property list.
1333 * Attaches any outstanding categories.
1334 * Builds vtable.
1335 * Locking: runtimeLock must be held by the caller
1336 **********************************************************************/
1337 static void methodizeClass(struct class_t *cls)
1338 {
1339 category_list *cats;
1340 BOOL isMeta;
1341
1342 rwlock_assert_writing(&runtimeLock);
1343
1344 isMeta = isMetaClass(cls);
1345
1346 // Methodizing for the first time
1347 if (PrintConnecting) {
1348 _objc_inform("CLASS: methodizing class '%s' %s",
1349 getName(cls), isMeta ? "(meta)" : "");
1350 }
1351
1352 // Build method and protocol and property lists.
1353 // Include methods and protocols and properties from categories, if any
1354 // Do NOT use cat->cls! It may have been remapped.
1355
1356 attachMethodLists(cls, (method_list_t **)&cls->data->ro->baseMethods, 1,
1357 isBundleClass(cls), NULL);
1358
1359 cats = unattachedCategoriesForClass(cls);
1360 attachCategoryMethods(cls, cats, NULL);
1361
1362 if (cats || cls->data->ro->baseProperties) {
1363 cls->data->properties =
1364 buildPropertyList(cls->data->ro->baseProperties, cats, isMeta);
1365 }
1366
1367 if (cats || cls->data->ro->baseProtocols) {
1368 cls->data->protocols =
1369 buildProtocolList(cats, cls->data->ro->baseProtocols, NULL);
1370 }
1371
1372 if (PrintConnecting) {
1373 uint32_t i;
1374 if (cats) {
1375 for (i = 0; i < cats->count; i++) {
1376 _objc_inform("CLASS: attached category %c%s(%s)",
1377 isMeta ? '+' : '-',
1378 getName(cls), cats->list[i].cat->name);
1379 }
1380 }
1381 }
1382
1383 if (cats) _free_internal(cats);
1384
1385 // No vtable until +initialize completes
1386 assert(cls->vtable == _objc_empty_vtable);
1387 }
1388
1389
1390 /***********************************************************************
1391 * remethodizeClass
1392 * Attach outstanding categories to an existing class.
1393 * Fixes up cls's method list, protocol list, and property list.
1394 * Updates method caches and vtables for cls and its subclasses.
1395 * Locking: runtimeLock must be held by the caller
1396 **********************************************************************/
1397 static void remethodizeClass(struct class_t *cls)
1398 {
1399 category_list *cats;
1400 BOOL isMeta;
1401
1402 rwlock_assert_writing(&runtimeLock);
1403
1404 isMeta = isMetaClass(cls);
1405
1406 // Re-methodizing: check for more categories
1407 if ((cats = unattachedCategoriesForClass(cls))) {
1408 chained_property_list *newproperties;
1409 struct protocol_list_t **newprotos;
1410 BOOL vtableAffected = NO;
1411
1412 if (PrintConnecting) {
1413 _objc_inform("CLASS: attaching categories to class '%s' %s",
1414 getName(cls), isMeta ? "(meta)" : "");
1415 }
1416
1417 // Update methods, properties, protocols
1418
1419 attachCategoryMethods(cls, cats, &vtableAffected);
1420
1421 newproperties = buildPropertyList(NULL, cats, isMeta);
1422 if (newproperties) {
1423 newproperties->next = cls->data->properties;
1424 cls->data->properties = newproperties;
1425 }
1426
1427 newprotos = buildProtocolList(cats, NULL, cls->data->protocols);
1428 if (cls->data->protocols && cls->data->protocols != newprotos) {
1429 _free_internal(cls->data->protocols);
1430 }
1431 cls->data->protocols = newprotos;
1432
1433 _free_internal(cats);
1434
1435 // Update method caches and vtables
1436 flushCaches(cls);
1437 if (vtableAffected) flushVtables(cls);
1438 }
1439 }
1440
1441
1442 /***********************************************************************
1443 * changeInfo
1444 * Atomically sets and clears some bits in cls's info field.
1445 * set and clear must not overlap.
1446 **********************************************************************/
1447 static void changeInfo(class_t *cls, unsigned int set, unsigned int clear)
1448 {
1449 uint32_t oldf, newf;
1450
1451 assert(isFuture(cls) || isRealized(cls));
1452
1453 do {
1454 oldf = cls->data->flags;
1455 newf = (oldf | set) & ~clear;
1456 } while (!OSAtomicCompareAndSwap32Barrier(oldf, newf, (volatile int32_t *)&cls->data->flags));
1457 }
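
/*
 * Usage sketch: changeInfo(cls, RW_SPECIALIZED_VTABLE, 0) (see updateVtable
 * above) sets a flag without clearing anything; a hypothetical
 * changeInfo(cls, RW_REALIZED, RW_FUTURE) would set the realized bit and
 * clear the future bit in one atomic step. The compare-and-swap loop retries
 * if another thread modifies the flags word between the read and the swap.
 */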
1458
1459
1460 /***********************************************************************
1461 * namedClasses
1462 * Returns the classname => class map of all non-meta classes.
1463 * Locking: runtimeLock must be read- or write-locked by the caller
1464 **********************************************************************/
1465
1466 NXMapTable *gdb_objc_realized_classes; // exported for debuggers in objc-gdb.h
1467
1468 static NXMapTable *namedClasses(void)
1469 {
1470 rwlock_assert_locked(&runtimeLock);
1471
1472 INIT_ONCE_PTR(gdb_objc_realized_classes,
1473 NXCreateMapTableFromZone(NXStrValueMapPrototype, 1024,
1474 _objc_internal_zone()),
1475 NXFreeMapTable(v) );
1476
1477 return gdb_objc_realized_classes;
1478 }
1479
1480
1481 /***********************************************************************
1482 * addNamedClass
1483 * Adds name => cls to the named non-meta class map.
1484 * Warns about duplicate class names and keeps the old mapping.
1485 * Locking: runtimeLock must be held by the caller
1486 **********************************************************************/
1487 static void addNamedClass(class_t *cls, const char *name)
1488 {
1489 rwlock_assert_writing(&runtimeLock);
1490 class_t *old;
1491 if ((old = NXMapGet(namedClasses(), name))) {
1492 inform_duplicate(name, (Class)old, (Class)cls);
1493 } else {
1494 NXMapInsert(namedClasses(), name, cls);
1495 }
1496 assert(!(cls->data->flags & RO_META));
1497
1498 // wrong: constructed classes are already realized when they get here
1499 // assert(!isRealized(cls));
1500 }
1501
1502
1503 /***********************************************************************
1504 * removeNamedClass
1505 * Removes cls from the name => cls map.
1506 * Locking: runtimeLock must be held by the caller
1507 **********************************************************************/
1508 static void removeNamedClass(class_t *cls, const char *name)
1509 {
1510 rwlock_assert_writing(&runtimeLock);
1511 assert(!(cls->data->flags & RO_META));
1512 if (cls == NXMapGet(namedClasses(), name)) {
1513 NXMapRemove(namedClasses(), name);
1514 } else {
1515 // cls has a name collision with another class - don't remove the other
1516 }
1517 }
1518
1519
1520 /***********************************************************************
1521 * realizedClasses
1522 * Returns the class list for realized non-meta classes.
1523 * Locking: runtimeLock must be read- or write-locked by the caller
1524 **********************************************************************/
1525 static NXHashTable *realizedClasses(void)
1526 {
1527 static NXHashTable *class_hash = NULL;
1528
1529 rwlock_assert_locked(&runtimeLock);
1530
1531 INIT_ONCE_PTR(class_hash,
1532 NXCreateHashTableFromZone(NXPtrPrototype, 1024, NULL,
1533 _objc_internal_zone()),
1534 NXFreeHashTable(v));
1535
1536 return class_hash;
1537 }
1538
1539
1540 /***********************************************************************
1541 * realizedMetaclasses
1542 * Returns the class list for realized metaclasses.
1543 * Locking: runtimeLock must be read- or write-locked by the caller
1544 **********************************************************************/
1545 static NXHashTable *realizedMetaclasses(void)
1546 {
1547 static NXHashTable *class_hash = NULL;
1548
1549 rwlock_assert_locked(&runtimeLock);
1550
1551 INIT_ONCE_PTR(class_hash,
1552 NXCreateHashTableFromZone(NXPtrPrototype, 1024, NULL,
1553 _objc_internal_zone()),
1554 NXFreeHashTable(v));
1555
1556 return class_hash;
1557 }
1558
1559
1560 /***********************************************************************
1561 * addRealizedClass
1562 * Adds cls to the realized non-meta class hash.
1563 * Locking: runtimeLock must be held by the caller
1564 **********************************************************************/
1565 static void addRealizedClass(class_t *cls)
1566 {
1567 rwlock_assert_writing(&runtimeLock);
1568 void *old;
1569 old = NXHashInsert(realizedClasses(), cls);
1570 objc_addRegisteredClass((Class)cls);
1571 assert(!isMetaClass(cls));
1572 assert(!old);
1573 }
1574
1575
1576 /***********************************************************************
1577 * removeRealizedClass
1578 * Removes cls from the realized non-meta class hash.
1579 * Locking: runtimeLock must be held by the caller
1580 **********************************************************************/
1581 static void removeRealizedClass(class_t *cls)
1582 {
1583 rwlock_assert_writing(&runtimeLock);
1584 if (isRealized(cls)) {
1585 assert(!isMetaClass(cls));
1586 NXHashRemove(realizedClasses(), cls);
1587 objc_removeRegisteredClass((Class)cls);
1588 }
1589 }
1590
1591
1592 /***********************************************************************
1593 * addRealizedMetaclass
1594 * Adds cls to the realized metaclass hash.
1595 * Locking: runtimeLock must be held by the caller
1596 **********************************************************************/
1597 static void addRealizedMetaclass(class_t *cls)
1598 {
1599 rwlock_assert_writing(&runtimeLock);
1600 void *old;
1601 old = NXHashInsert(realizedMetaclasses(), cls);
1602 assert(isMetaClass(cls));
1603 assert(!old);
1604 }
1605
1606
1607 /***********************************************************************
1608 * removeRealizedMetaclass
1609 * Removes cls from the realized metaclass hash.
1610 * Locking: runtimeLock must be held by the caller
1611 **********************************************************************/
1612 static void removeRealizedMetaclass(class_t *cls)
1613 {
1614 rwlock_assert_writing(&runtimeLock);
1615 if (isRealized(cls)) {
1616 assert(isMetaClass(cls));
1617 NXHashRemove(realizedMetaclasses(), cls);
1618 }
1619 }
1620
1621
1622 /***********************************************************************
1623 * uninitializedClasses
1624 * Returns the metaclass => class map for un-+initialized classes
1625 * Replaces the 32-bit cls = objc_getClass(metacls->name) during +initialize.
1626 * Locking: runtimeLock must be read- or write-locked by the caller
1627 **********************************************************************/
1628 static NXMapTable *uninitializedClasses(void)
1629 {
1630 static NXMapTable *class_map = NULL;
1631
1632 rwlock_assert_locked(&runtimeLock);
1633
1634 INIT_ONCE_PTR(class_map,
1635 NXCreateMapTableFromZone(NXPtrValueMapPrototype, 1024,
1636 _objc_internal_zone()),
1637 NXFreeMapTable(v) );
1638
1639 return class_map;
1640 }
1641
1642
1643 /***********************************************************************
1644 * addUninitializedClass
1645 * Adds metacls => cls to the un-+initialized class map
1646 * Locking: runtimeLock must be held by the caller
1647 **********************************************************************/
1648 static void addUninitializedClass(class_t *cls, class_t *metacls)
1649 {
1650 rwlock_assert_writing(&runtimeLock);
1651 void *old;
1652 old = NXMapInsert(uninitializedClasses(), metacls, cls);
1653 assert(isRealized(metacls) ? isMetaClass(metacls) : metacls->data->flags & RO_META);
1654 assert(! (isRealized(cls) ? isMetaClass(cls) : cls->data->flags & RO_META));
1655 assert(!old);
1656 }
1657
1658
1659 static void removeUninitializedClass(class_t *cls)
1660 {
1661 rwlock_assert_writing(&runtimeLock);
1662 NXMapRemove(uninitializedClasses(), cls->isa);
1663 }
1664
1665
1666 /***********************************************************************
1667 * getNonMetaClass
1668 * Return the ordinary class for this class or metaclass.
1669 * Used by +initialize.
1670 * Locking: runtimeLock must be read- or write-locked by the caller
1671 **********************************************************************/
1672 static class_t *getNonMetaClass(class_t *cls)
1673 {
1674 rwlock_assert_locked(&runtimeLock);
1675 if (isMetaClass(cls)) {
1676 cls = NXMapGet(uninitializedClasses(), cls);
1677 }
1678 return cls;
1679 }
1680
1681
1682 /***********************************************************************
1683 * _class_getNonMetaClass
1684 * Return the ordinary class for this class or metaclass.
1685 * Used by +initialize.
1686 * Locking: acquires runtimeLock
1687 **********************************************************************/
1688 __private_extern__ Class _class_getNonMetaClass(Class cls_gen)
1689 {
1690 class_t *cls = newcls(cls_gen);
1691 rwlock_write(&runtimeLock);
1692 cls = getNonMetaClass(cls);
1693 realizeClass(cls);
1694 rwlock_unlock_write(&runtimeLock);
1695
1696 return (Class)cls;
1697 }
1698
1699
1700
1701 /***********************************************************************
1702 * futureClasses
1703 * Returns the classname => future class map for unrealized future classes.
1704 * Locking: runtimeLock must be held by the caller
1705 **********************************************************************/
1706 static NXMapTable *futureClasses(void)
1707 {
1708 rwlock_assert_writing(&runtimeLock);
1709
1710 static NXMapTable *future_class_map = NULL;
1711
1712 if (future_class_map) return future_class_map;
1713
1714 // future_class_map is big enough to hold CF's classes and a few others
1715 future_class_map = NXCreateMapTableFromZone(NXStrValueMapPrototype, 32,
1716 _objc_internal_zone());
1717
1718 return future_class_map;
1719 }
1720
1721
1722 /***********************************************************************
1723 * addFutureClass
1724 * Installs cls as the class structure to use for the named class if it appears.
1725 * Locking: runtimeLock must be held by the caller
1726 **********************************************************************/
1727 static void addFutureClass(const char *name, class_t *cls)
1728 {
1729 void *old;
1730
1731 rwlock_assert_writing(&runtimeLock);
1732
1733 if (PrintFuture) {
1734 _objc_inform("FUTURE: reserving %p for %s", cls, name);
1735 }
1736
1737 cls->data = _calloc_internal(sizeof(*cls->data), 1);
1738 cls->data->flags = RO_FUTURE;
1739
1740 old = NXMapKeyCopyingInsert(futureClasses(), name, cls);
1741 assert(!old);
1742 }
1743
1744
1745 /***********************************************************************
1746 * removeFutureClass
1747 * Removes the named class from the unrealized future class list,
1748 * because it has been realized.
1749 * Locking: runtimeLock must be held by the caller
1750 **********************************************************************/
1751 static void removeFutureClass(const char *name)
1752 {
1753 rwlock_assert_writing(&runtimeLock);
1754
1755 NXMapKeyFreeingRemove(futureClasses(), name);
1756 }
1757
1758
1759 /***********************************************************************
1760 * remappedClasses
1761 * Returns the oldClass => newClass map for realized future classes.
1762 * Locking: runtimeLock must be read- or write-locked by the caller
1763 **********************************************************************/
1764 static NXMapTable *remappedClasses(BOOL create)
1765 {
1766 static NXMapTable *remapped_class_map = NULL;
1767
1768 rwlock_assert_locked(&runtimeLock);
1769
1770 if (remapped_class_map) return remapped_class_map;
1771 if (!create) return NULL;
1772
1773 // remapped_class_map is big enough to hold CF's classes and a few others
1774 INIT_ONCE_PTR(remapped_class_map,
1775 NXCreateMapTableFromZone(NXPtrValueMapPrototype, 32,
1776 _objc_internal_zone()),
1777 NXFreeMapTable(v));
1778
1779 return remapped_class_map;
1780 }
1781
1782
1783 /***********************************************************************
1784 * noClassesRemapped
1785 * Returns YES if no classes have been remapped
1786 * Locking: runtimeLock must be read- or write-locked by the caller
1787 **********************************************************************/
1788 static BOOL noClassesRemapped(void)
1789 {
1790 rwlock_assert_locked(&runtimeLock);
1791
1792 BOOL result = (remappedClasses(NO) == NULL);
1793 return result;
1794 }
1795
1796
1797 /***********************************************************************
1798 * addRemappedClass
1799 * newcls is a realized future class, replacing oldcls.
1800 * Locking: runtimeLock must be write-locked by the caller
1801 **********************************************************************/
1802 static void addRemappedClass(class_t *oldcls, class_t *newcls)
1803 {
1804 rwlock_assert_writing(&runtimeLock);
1805
1806 if (PrintFuture) {
1807 _objc_inform("FUTURE: using %p instead of %p for %s",
1808 newcls, oldcls, getName(newcls));
1809 }
1810
1811 void *old;
1812 old = NXMapInsert(remappedClasses(YES), oldcls, newcls);
1813 assert(!old);
1814 }
1815
1816
1817 /***********************************************************************
1818 * remapClass
1819 * Returns the live class pointer for cls, which may be pointing to
1820 * a class struct that has been reallocated.
1821 * Locking: runtimeLock must be read- or write-locked by the caller
1822 **********************************************************************/
1823 static class_t *remapClass(class_t *cls)
1824 {
1825 rwlock_assert_locked(&runtimeLock);
1826
1827 class_t *newcls = NXMapGet(remappedClasses(YES), cls);
1828 return newcls ? newcls : cls;
1829 }
1830
1831
1832 /***********************************************************************
1833 * remapClassRef
1834 * Fix up a class ref, in case the class referenced has been reallocated.
1835 * Locking: runtimeLock must be read- or write-locked by the caller
1836 **********************************************************************/
1837 static void remapClassRef(class_t **clsref)
1838 {
1839 rwlock_assert_locked(&runtimeLock);
1840
1841 class_t *newcls = remapClass(*clsref);
1842 if (*clsref != newcls) *clsref = newcls;
1843 }
1844
1845
1846 /***********************************************************************
1847 * addSubclass
1848 * Adds subcls as a subclass of supercls.
1849 * Locking: runtimeLock must be held by the caller.
1850 **********************************************************************/
1851 static void addSubclass(class_t *supercls, class_t *subcls)
1852 {
1853 rwlock_assert_writing(&runtimeLock);
1854
1855 if (supercls && subcls) {
1856 assert(isRealized(supercls));
1857 assert(isRealized(subcls));
1858 subcls->data->nextSiblingClass = supercls->data->firstSubclass;
1859 supercls->data->firstSubclass = subcls;
1860 }
1861 }
1862
1863
1864 /***********************************************************************
1865 * removeSubclass
1866 * Removes subcls as a subclass of supercls.
1867 * Locking: runtimeLock must be held by the caller.
1868 **********************************************************************/
1869 static void removeSubclass(class_t *supercls, class_t *subcls)
1870 {
1871 rwlock_assert_writing(&runtimeLock);
1872 assert(getSuperclass(subcls) == supercls);
1873
1874 class_t **cp;
1875 for (cp = &supercls->data->firstSubclass;
1876 *cp && *cp != subcls;
1877 cp = &(*cp)->data->nextSiblingClass)
1878 ;
1879 assert(*cp == subcls);
1880 *cp = subcls->data->nextSiblingClass;
1881 }
1882
1883
1884
1885 /***********************************************************************
1886 * protocols
1887 * Returns the protocol name => protocol map for protocols.
1888 * Locking: runtimeLock must be read- or write-locked by the caller
1889 **********************************************************************/
1890 static NXMapTable *protocols(void)
1891 {
1892 static NXMapTable *protocol_map = NULL;
1893
1894 rwlock_assert_locked(&runtimeLock);
1895
1896 INIT_ONCE_PTR(protocol_map,
1897 NXCreateMapTableFromZone(NXStrValueMapPrototype, 16,
1898 _objc_internal_zone()),
1899 NXFreeMapTable(v) );
1900
1901 return protocol_map;
1902 }
1903
1904
1905 /***********************************************************************
1906 * remapProtocol
1907 * Returns the live protocol pointer for proto, which may be pointing to
1908 * a protocol struct that has been reallocated.
1909 * Locking: runtimeLock must be read- or write-locked by the caller
1910 **********************************************************************/
1911 static protocol_t *remapProtocol(protocol_ref_t proto)
1912 {
1913 rwlock_assert_locked(&runtimeLock);
1914
1915 protocol_t *newproto = NXMapGet(protocols(), ((protocol_t *)proto)->name);
1916 return newproto ? newproto : (protocol_t *)proto;
1917 }
1918
1919
1920 /***********************************************************************
1921 * remapProtocolRef
1922 * Fix up a protocol ref, in case the protocol referenced has been reallocated.
1923 * Locking: runtimeLock must be read- or write-locked by the caller
1924 **********************************************************************/
1925 static void remapProtocolRef(protocol_t **protoref)
1926 {
1927 rwlock_assert_locked(&runtimeLock);
1928
1929 protocol_t *newproto = remapProtocol((protocol_ref_t)*protoref);
1930 if (*protoref != newproto) *protoref = newproto;
1931 }
1932
1933
1934 /***********************************************************************
1935 * moveIvars
1936 * Slides a class's ivars to accommodate the given superclass size.
1937 * Also slides ivar and weak GC layouts if provided.
1938 * Ivars are NOT compacted to compensate for a superclass that shrunk.
1939 * Locking: runtimeLock must be held by the caller.
1940 **********************************************************************/
1941 static void moveIvars(class_ro_t *ro, uint32_t superSize,
1942 layout_bitmap *ivarBitmap, layout_bitmap *weakBitmap)
1943 {
1944 rwlock_assert_writing(&runtimeLock);
1945
1946 uint32_t diff;
1947 uint32_t i;
1948
1949 assert(superSize > ro->instanceStart);
1950 diff = superSize - ro->instanceStart;
1951
1952 if (ro->ivars) {
1953 // Find maximum alignment in this class's ivars
1954 uint32_t maxAlignment = 1;
1955 for (i = 0; i < ro->ivars->count; i++) {
1956 ivar_t *ivar = ivar_list_nth(ro->ivars, i);
1957 if (!ivar->offset) continue; // anonymous bitfield
1958
1959 uint32_t alignment = ivar_alignment(ivar);
1960 if (alignment > maxAlignment) maxAlignment = alignment;
1961 }
1962
1963 // Compute a slide value that preserves that alignment
1964 uint32_t alignMask = maxAlignment - 1;
1965 if (diff & alignMask) diff = (diff + alignMask) & ~alignMask;
1966
1967 // Slide all of this class's ivars en masse
1968 for (i = 0; i < ro->ivars->count; i++) {
1969 ivar_t *ivar = ivar_list_nth(ro->ivars, i);
1970 if (!ivar->offset) continue; // anonymous bitfield
1971
1972 uint32_t oldOffset = (uint32_t)*ivar->offset;
1973 uint32_t newOffset = oldOffset + diff;
1974 *ivar->offset = newOffset;
1975
1976 if (PrintIvars) {
1977 _objc_inform("IVARS: offset %u -> %u for %s (size %u, align %u)",
1978 oldOffset, newOffset, ivar->name,
1979 ivar->size, ivar_alignment(ivar));
1980 }
1981 }
1982
1983 // Slide GC layouts
1984 uint32_t oldOffset = ro->instanceStart;
1985 uint32_t newOffset = ro->instanceStart + diff;
1986
1987 if (ivarBitmap) {
1988 layout_bitmap_slide(ivarBitmap,
1989 oldOffset >> WORD_SHIFT,
1990 newOffset >> WORD_SHIFT);
1991 }
1992 if (weakBitmap) {
1993 layout_bitmap_slide(weakBitmap,
1994 oldOffset >> WORD_SHIFT,
1995 newOffset >> WORD_SHIFT);
1996 }
1997 }
1998
1999 *(uint32_t *)&ro->instanceStart += diff;
2000 *(uint32_t *)&ro->instanceSize += diff;
2001
2002 if (!ro->ivars) {
2003 // No ivars slid, but superclass changed size.
2004 // Expand bitmap in preparation for layout_bitmap_splat().
2005 if (ivarBitmap) layout_bitmap_grow(ivarBitmap, ro->instanceSize >> WORD_SHIFT);
2006 if (weakBitmap) layout_bitmap_grow(weakBitmap, ro->instanceSize >> WORD_SHIFT);
2007 }
2008 }
2009
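/* Worked example (illustrative numbers only, not taken from any real class):
 * suppose the superclass grew from 16 to 24 bytes and this class's largest
 * ivar alignment is 8. Then:
 *
 *     diff      = 24 - 16       = 8   // superSize - ro->instanceStart
 *     alignMask = 8 - 1         = 7
 *     diff      = (8 + 7) & ~7  = 8   // already aligned, so unchanged
 *
 * Every ivar offset is bumped by 8, instanceStart goes 16 -> 24,
 * instanceSize grows by 8, and on LP64 the GC layout bitmaps are slid
 * from word 16 >> WORD_SHIFT (2) to word 24 >> WORD_SHIFT (3).
 */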
2010
2011 /***********************************************************************
2012 * getIvar
2013 * Look up an ivar by name.
2014 * Locking: runtimeLock must be read- or write-locked by the caller.
2015 **********************************************************************/
2016 static ivar_t *getIvar(class_t *cls, const char *name)
2017 {
2018 rwlock_assert_locked(&runtimeLock);
2019
2020 const ivar_list_t *ivars;
2021 assert(isRealized(cls));
2022 if ((ivars = cls->data->ro->ivars)) {
2023 uint32_t i;
2024 for (i = 0; i < ivars->count; i++) {
2025 struct ivar_t *ivar = ivar_list_nth(ivars, i);
2026 if (!ivar->offset) continue; // anonymous bitfield
2027
2028 // ivar->name may be NULL for anonymous bitfields etc.
2029 if (ivar->name && 0 == strcmp(name, ivar->name)) {
2030 return ivar;
2031 }
2032 }
2033 }
2034
2035 return NULL;
2036 }
2037
2038
2039 /***********************************************************************
2040 * realizeClass
2041 * Performs first-time initialization on class cls,
2042 * including allocating its read-write data.
2043 * Returns the real class structure for the class.
2044 * Locking: runtimeLock must be write-locked by the caller
2045 **********************************************************************/
2046 static class_t *realizeClass(class_t *cls)
2047 {
2048 rwlock_assert_writing(&runtimeLock);
2049
2050 const class_ro_t *ro;
2051 class_rw_t *rw;
2052 class_t *supercls;
2053 class_t *metacls;
2054 BOOL isMeta;
2055
2056 if (!cls) return NULL;
2057 if (isRealized(cls)) return cls;
2058 assert(cls == remapClass(cls));
2059
2060 ro = (const class_ro_t *)cls->data;
2061 if (ro->flags & RO_FUTURE) {
2062 // This was a future class. rw data is already allocated.
2063 rw = cls->data;
2064 ro = cls->data->ro;
2065 changeInfo(cls, RW_REALIZED, RW_FUTURE);
2066 } else {
2067 // Normal class. Allocate writeable class data.
2068 rw = _calloc_internal(sizeof(class_rw_t), 1);
2069 rw->ro = ro;
2070 rw->flags = RW_REALIZED;
2071 cls->data = rw;
2072 }
2073
2074 isMeta = (ro->flags & RO_META) ? YES : NO;
2075
2076 rw->version = isMeta ? 7 : 0; // old runtime went up to 6
2077
2078 if (PrintConnecting) {
2079 _objc_inform("CLASS: realizing class '%s' %s %p %p",
2080 ro->name, isMeta ? "(meta)" : "", cls, ro);
2081 }
2082
2083 // Realize superclass and metaclass, if they aren't already.
2084 // This needs to be done after RW_REALIZED is set above, for root classes.
2085 supercls = realizeClass(remapClass(cls->superclass));
2086 metacls = realizeClass(remapClass(cls->isa));
2087
2088 // Check for remapped superclass
2089 // fixme doesn't handle remapped metaclass
2090 assert(metacls == cls->isa);
2091 if (supercls != cls->superclass) {
2092 cls->superclass = supercls;
2093 }
2094
2095 /* debug: print them all
2096 if (ro->ivars) {
2097 uint32_t i;
2098 for (i = 0; i < ro->ivars->count; i++) {
2099 ivar_t *ivar = ivar_list_nth(ro->ivars, i);
2100 if (!ivar->offset) continue; // anonymous bitfield
2101
2102 _objc_inform("IVARS: %s.%s (offset %u, size %u, align %u)",
2103 ro->name, ivar->name,
2104 *ivar->offset, ivar->size, ivar_alignment(ivar));
2105 }
2106 }
2107 */
2108
2109
2110 if (supercls) {
2111 // Non-fragile ivars - reconcile this class with its superclass
2112 layout_bitmap ivarBitmap;
2113 layout_bitmap weakBitmap;
2114 BOOL layoutsChanged = NO;
2115
2116 if (UseGC) {
2117 // fixme can optimize for "class has no new ivars", etc
2118 // WARNING: gcc c++ sets instanceStart/Size=0 for classes with
2119 // no local ivars, but does provide a layout bitmap.
2120 // Handle that case specially so layout_bitmap_create doesn't die.
2121 // The other ivar sliding code below still works fine, and
2122 // the final result is a good class.
2123 if (ro->instanceStart == 0 && ro->instanceSize == 0) {
2124 // We can't use ro->ivarLayout because we don't know
2125 // how long it is. Force a new layout to be created.
2126 if (PrintIvars) {
2127 _objc_inform("IVARS: instanceStart/Size==0 for class %s; "
2128 "disregarding ivar layout", ro->name);
2129 }
2130 ivarBitmap =
2131 layout_bitmap_create(NULL,
2132 supercls->data->ro->instanceSize,
2133 supercls->data->ro->instanceSize, NO);
2134 weakBitmap =
2135 layout_bitmap_create(NULL,
2136 supercls->data->ro->instanceSize,
2137 supercls->data->ro->instanceSize, YES);
2138 layoutsChanged = YES;
2139 } else {
2140 ivarBitmap =
2141 layout_bitmap_create(ro->ivarLayout,
2142 ro->instanceSize,
2143 ro->instanceSize, NO);
2144 weakBitmap =
2145 layout_bitmap_create(ro->weakIvarLayout,
2146 ro->instanceSize,
2147 ro->instanceSize, YES);
2148 }
2149 }
2150
2151 if (ro->instanceStart < supercls->data->ro->instanceSize) {
2152 // Superclass has changed size. This class's ivars must move.
2153 // Also slide layout bits in parallel.
2154 // This code is incapable of compacting the subclass to
2155 // compensate for a superclass that shrunk, so don't do that.
2156 if (PrintIvars) {
2157 _objc_inform("IVARS: sliding ivars for class %s "
2158 "(superclass was %u bytes, now %u)",
2159 ro->name, ro->instanceStart,
2160 supercls->data->ro->instanceSize);
2161 }
2162 class_ro_t *ro_w = make_ro_writeable(rw);
2163 ro = rw->ro;
2164 moveIvars(ro_w, supercls->data->ro->instanceSize,
2165 UseGC ? &ivarBitmap : NULL, UseGC ? &weakBitmap : NULL);
2166 gdb_objc_class_changed((Class)cls, OBJC_CLASS_IVARS_CHANGED, ro->name);
2167 layoutsChanged = YES;
2168 }
2169
2170 if (UseGC) {
2171 // Check superclass's layout against this class's layout.
2172 // This needs to be done even if the superclass is not bigger.
2173 layout_bitmap superBitmap =
2174 layout_bitmap_create(supercls->data->ro->ivarLayout,
2175 supercls->data->ro->instanceSize,
2176 supercls->data->ro->instanceSize, NO);
2177 layoutsChanged |= layout_bitmap_splat(ivarBitmap, superBitmap,
2178 ro->instanceStart);
2179 layout_bitmap_free(superBitmap);
2180
2181 superBitmap =
2182 layout_bitmap_create(supercls->data->ro->weakIvarLayout,
2183 supercls->data->ro->instanceSize,
2184 supercls->data->ro->instanceSize, YES);
2185 layoutsChanged |= layout_bitmap_splat(weakBitmap, superBitmap,
2186 ro->instanceStart);
2187 layout_bitmap_free(superBitmap);
2188
2189 if (layoutsChanged) {
2190 // Rebuild layout strings.
2191 if (PrintIvars) {
2192 _objc_inform("IVARS: gc layout changed for class %s",
2193 ro->name);
2194 }
2195 class_ro_t *ro_w = make_ro_writeable(rw);
2196 ro = rw->ro;
2197 ro_w->ivarLayout = layout_string_create(ivarBitmap);
2198 ro_w->weakIvarLayout = layout_string_create(weakBitmap);
2199 }
2200
2201 layout_bitmap_free(ivarBitmap);
2202 layout_bitmap_free(weakBitmap);
2203 }
2204 }
2205
2206 // Connect this class to its superclass's subclass lists
2207 if (supercls) {
2208 addSubclass(supercls, cls);
2209 }
2210
2211 // Attach categories
2212 methodizeClass(cls);
2213
2214 if (!isMeta) {
2215 addRealizedClass(cls);
2216 } else {
2217 addRealizedMetaclass(cls);
2218 }
2219
2220 return cls;
2221 }
2222
2223
2224 /***********************************************************************
2225 * getClass
2226 * Looks up a class by name. The class MIGHT NOT be realized.
2227 * Locking: runtimeLock must be read- or write-locked by the caller.
2228 **********************************************************************/
2229 static class_t *getClass(const char *name)
2230 {
2231 rwlock_assert_locked(&runtimeLock);
2232
2233 return (class_t *)NXMapGet(namedClasses(), name);
2234 }
2235
2236
2237 /***********************************************************************
2238 * realizeAllClassesInImage
2239 * Non-lazily realizes all unrealized classes in the given image.
2240 * Locking: runtimeLock must be held by the caller.
2241 **********************************************************************/
2242 static void realizeAllClassesInImage(header_info *hi)
2243 {
2244 rwlock_assert_writing(&runtimeLock);
2245
2246 size_t count, i;
2247 class_t **classlist;
2248
2249 if (hi->allClassesRealized) return;
2250
2251 classlist = _getObjc2ClassList(hi, &count);
2252
2253 for (i = 0; i < count; i++) {
2254 realizeClass(remapClass(classlist[i]));
2255 }
2256
2257 hi->allClassesRealized = YES;
2258 }
2259
2260
2261 /***********************************************************************
2262 * realizeAllClasses
2263 * Non-lazily realizes all unrealized classes in all known images.
2264 * Locking: runtimeLock must be held by the caller.
2265 **********************************************************************/
2266 static void realizeAllClasses(void)
2267 {
2268 rwlock_assert_writing(&runtimeLock);
2269
2270 header_info *hi;
2271 for (hi = FirstHeader; hi; hi = hi->next) {
2272 realizeAllClassesInImage(hi);
2273 }
2274 }
2275
2276
2277 /***********************************************************************
2278 * _objc_allocateFutureClass
2279 * Allocate an unresolved future class for the given class name.
2280 * Returns any existing allocation if one was already made.
2281 * Assumes the named class doesn't exist yet.
2282 * Locking: acquires runtimeLock
2283 **********************************************************************/
2284 __private_extern__ Class _objc_allocateFutureClass(const char *name)
2285 {
2286 rwlock_write(&runtimeLock);
2287
2288 struct class_t *cls;
2289 NXMapTable *future_class_map = futureClasses();
2290
2291 if ((cls = NXMapGet(future_class_map, name))) {
2292 // Already have a future class for this name.
2293 rwlock_unlock_write(&runtimeLock);
2294 return (Class)cls;
2295 }
2296
2297 cls = (class_t *)_calloc_class(sizeof(*cls));
2298 addFutureClass(name, cls);
2299
2300 rwlock_unlock_write(&runtimeLock);
2301 return (Class)cls;
2302 }
2303
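/* Mechanism sketch (illustrative only; this symbol is __private_extern__ in
 * this version, so it is not callable from outside the runtime, and
 * "MyLazyClass" is a hypothetical name):
 *
 *     Class future = _objc_allocateFutureClass("MyLazyClass");
 *     // 'future' can be stored and used as a class pointer immediately.
 *     // When an image that defines MyLazyClass is later mapped in,
 *     // _read_images() copies the real class_t into this allocation and
 *     // remaps references, so 'future' becomes the live class.
 */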
2304
2305 /***********************************************************************
2306 * objc_setFutureClass
2307 **********************************************************************/
2308 void objc_setFutureClass(Class cls, const char *name)
2309 {
2310 // fixme hack: do nothing - NSCFString is handled specially elsewhere
2311 }
2312
2313
2314 #define FOREACH_REALIZED_SUBCLASS(_c, _cls, code) \
2315 do { \
2316 rwlock_assert_writing(&runtimeLock); \
2317 class_t *_top = _cls; \
2318 class_t *_c = _top; \
2319 if (_c) { \
2320 while (1) { \
2321 code \
2322 if (_c->data->firstSubclass) { \
2323 _c = _c->data->firstSubclass; \
2324 } else { \
2325 while (!_c->data->nextSiblingClass && _c != _top) { \
2326 _c = getSuperclass(_c); \
2327 } \
2328 if (_c == _top) break; \
2329 _c = _c->data->nextSiblingClass; \
2330 } \
2331 } \
2332 } else { \
2333 /* nil means all realized classes */ \
2334 NXHashTable *_classes = realizedClasses(); \
2335 NXHashTable *_metaclasses = realizedMetaclasses(); \
2336 NXHashState _state; \
2337 _state = NXInitHashState(_classes); \
2338 while (NXNextHashState(_classes, &_state, (void**)&_c)) \
2339 { \
2340 code \
2341 } \
2342 _state = NXInitHashState(_metaclasses); \
2343 while (NXNextHashState(_metaclasses, &_state, (void**)&_c)) \
2344 { \
2345 code \
2346 } \
2347 } \
2348 } while (0)
2349
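/* Note on FOREACH_REALIZED_SUBCLASS (descriptive only; behavior unchanged):
 * when _cls is non-nil, this is a pre-order depth-first walk of the realized
 * subclass tree rooted at _cls, descending via data->firstSubclass, moving
 * across via data->nextSiblingClass, and climbing back up with getSuperclass()
 * until it returns to the starting class. When _cls is nil, it instead
 * enumerates every entry of the realizedClasses() and realizedMetaclasses()
 * hash tables. `code` runs once per visited class, under the runtimeLock
 * write lock asserted above.
 */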
2350
2351 /***********************************************************************
2352 * flushVtables
2353 * Rebuilds vtables for cls and its realized subclasses.
2354 * If cls is Nil, all realized classes and metaclasses are touched.
2355 * Locking: runtimeLock must be held by the caller.
2356 **********************************************************************/
2357 static void flushVtables(class_t *cls)
2358 {
2359 rwlock_assert_writing(&runtimeLock);
2360
2361 if (PrintVtables && !cls) {
2362 _objc_inform("VTABLES: ### EXPENSIVE ### global vtable flush!");
2363 }
2364
2365 FOREACH_REALIZED_SUBCLASS(c, cls, {
2366 updateVtable(c, NO);
2367 });
2368 }
2369
2370
2371 /***********************************************************************
2372 * flushCaches
2373 * Flushes caches for cls and its realized subclasses.
2374 * Does not update vtables.
2375 * If cls is Nil, all realized classes and metaclasses are touched.
2376 * Locking: runtimeLock must be held by the caller.
2377 **********************************************************************/
2378 static void flushCaches(class_t *cls)
2379 {
2380 rwlock_assert_writing(&runtimeLock);
2381
2382 FOREACH_REALIZED_SUBCLASS(c, cls, {
2383 flush_cache((Class)c);
2384 });
2385 }
2386
2387
2388 /***********************************************************************
2389 * flush_caches
2390 * Flushes caches and rebuilds vtables for cls, its subclasses,
2391 * and optionally its metaclass.
2392 * Locking: acquires runtimeLock
2393 **********************************************************************/
2394 __private_extern__ void flush_caches(Class cls_gen, BOOL flush_meta)
2395 {
2396 class_t *cls = newcls(cls_gen);
2397 rwlock_write(&runtimeLock);
2398 // fixme optimize vtable flushing? (only needed for vtable'd selectors)
2399 flushCaches(cls);
2400 flushVtables(cls);
2401 // don't flush root class's metaclass twice (it's a subclass of the root)
2402 if (flush_meta && getSuperclass(cls)) {
2403 flushCaches(cls->isa);
2404 flushVtables(cls->isa);
2405 }
2406 rwlock_unlock_write(&runtimeLock);
2407 }
2408
2409
2410 /***********************************************************************
2411 * map_images
2412 * Process the given images which are being mapped in by dyld.
2413 * Calls ABI-agnostic code after taking ABI-specific locks.
2414 *
2415 * Locking: write-locks runtimeLock
2416 **********************************************************************/
2417 __private_extern__ const char *
2418 map_images(enum dyld_image_states state, uint32_t infoCount,
2419 const struct dyld_image_info infoList[])
2420 {
2421 const char *err;
2422
2423 rwlock_write(&runtimeLock);
2424 err = map_images_nolock(state, infoCount, infoList);
2425 rwlock_unlock_write(&runtimeLock);
2426 return err;
2427 }
2428
2429
2430 /***********************************************************************
2431 * load_images
2432 * Process +load in the given images which are being mapped in by dyld.
2433 * Calls ABI-agnostic code after taking ABI-specific locks.
2434 *
2435 * Locking: write-locks runtimeLock and loadMethodLock
2436 **********************************************************************/
2437 __private_extern__ const char *
2438 load_images(enum dyld_image_states state, uint32_t infoCount,
2439 const struct dyld_image_info infoList[])
2440 {
2441 BOOL found;
2442
2443 recursive_mutex_lock(&loadMethodLock);
2444
2445 // Discover load methods
2446 rwlock_write(&runtimeLock);
2447 found = load_images_nolock(state, infoCount, infoList);
2448 rwlock_unlock_write(&runtimeLock);
2449
2450 // Call +load methods (without runtimeLock - re-entrant)
2451 if (found) {
2452 call_load_methods();
2453 }
2454
2455 recursive_mutex_unlock(&loadMethodLock);
2456
2457 return NULL;
2458 }
2459
2460
2461 /***********************************************************************
2462 * unmap_image
2463 * Process the given image which is about to be unmapped by dyld.
2464 * mh is mach_header instead of headerType because that's what
2465 * dyld_priv.h says even for 64-bit.
2466 *
2467 * Locking: write-locks runtimeLock and loadMethodLock
2468 **********************************************************************/
2469 __private_extern__ void
2470 unmap_image(const struct mach_header *mh, intptr_t vmaddr_slide)
2471 {
2472 recursive_mutex_lock(&loadMethodLock);
2473 rwlock_write(&runtimeLock);
2474
2475 unmap_image_nolock(mh, vmaddr_slide);
2476
2477 rwlock_unlock_write(&runtimeLock);
2478 recursive_mutex_unlock(&loadMethodLock);
2479 }
2480
2481
2482 /***********************************************************************
2483 * _read_images
2484 * Perform initial processing of the headers in the linked
2485 * list beginning with headerList.
2486 *
2487 * Called by: map_images_nolock
2488 *
2489 * Locking: runtimeLock acquired by map_images
2490 **********************************************************************/
2491 __private_extern__ void _read_images(header_info **hList, uint32_t hCount)
2492 {
2493 header_info *hi;
2494 uint32_t hIndex;
2495 size_t count;
2496 size_t i;
2497 class_t **resolvedFutureClasses = NULL;
2498 size_t resolvedFutureClassCount = 0;
2499 static BOOL doneOnce;
2500
2501 rwlock_assert_writing(&runtimeLock);
2502
2503 if (!doneOnce) {
2504 initVtables();
2505 doneOnce = YES;
2506 }
2507
2508 #define EACH_HEADER \
2509 hIndex = 0; \
2510 hIndex < hCount && (hi = hList[hIndex]); \
2511 hIndex++
2512
2513 // Complain about images that contain old-ABI data
2514 // fixme new-ABI compiler still emits some bits into __OBJC segment
2515 for (EACH_HEADER) {
2516 size_t count;
2517 if (_getObjcSelectorRefs(hi, &count) || _getObjcModules(hi, &count)) {
2518 _objc_inform("found old-ABI metadata in image %s !",
2519 hi->os.dl_info.dli_fname);
2520 }
2521 }
2522
2523 // fixme hack
2524 static BOOL hackedNSCFString = NO;
2525 if (!hackedNSCFString) {
2526 // Insert future class __CFConstantStringClassReference == NSCFString
2527 void *dlh = dlopen("/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation", RTLD_LAZY | RTLD_NOLOAD | RTLD_FIRST);
2528 if (dlh) {
2529 void *addr = dlsym(dlh, "__CFConstantStringClassReference");
2530 if (addr) {
2531 addFutureClass("NSCFString", (class_t *)addr);
2532 hackedNSCFString = YES;
2533 }
2534 dlclose(dlh);
2535 }
2536 }
2537
2538 // Discover classes. Fix up unresolved future classes. Mark bundle classes.
2539 NXMapTable *future_class_map = futureClasses();
2540 for (EACH_HEADER) {
2541 class_t **classlist = _getObjc2ClassList(hi, &count);
2542 for (i = 0; i < count; i++) {
2543 const char *name = getName(classlist[i]);
2544 if (NXCountMapTable(future_class_map) > 0) {
2545 class_t *newCls = NXMapGet(future_class_map, name);
2546 if (newCls) {
2547 // Copy class_t to future class's struct.
2548 // Preserve future's rw data block.
2549 class_rw_t *rw = newCls->data;
2550 memcpy(newCls, classlist[i], sizeof(class_t));
2551 rw->ro = (class_ro_t *)newCls->data;
2552 newCls->data = rw;
2553
2554 removeFutureClass(name);
2555 addRemappedClass(classlist[i], newCls);
2556 classlist[i] = newCls;
2557 // Non-lazily realize the class below.
2558 resolvedFutureClasses = (class_t **)
2559 _realloc_internal(resolvedFutureClasses,
2560 (resolvedFutureClassCount+1)
2561 * sizeof(class_t *));
2562 resolvedFutureClasses[resolvedFutureClassCount++] = newCls;
2563 }
2564 }
2565 addNamedClass(classlist[i], name);
2566 addUninitializedClass(classlist[i], classlist[i]->isa);
2567 if (hi->mhdr->filetype == MH_BUNDLE) {
2568 classlist[i]->data->flags |= RO_FROM_BUNDLE;
2569 classlist[i]->isa->data->flags |= RO_FROM_BUNDLE;
2570 }
2571 }
2572 }
2573
2574 // Fix up remapped classes
2575 // classlist is up to date, but classrefs may not be
2576
2577 if (!noClassesRemapped()) {
2578 for (EACH_HEADER) {
2579 class_t **classrefs = _getObjc2ClassRefs(hi, &count);
2580 for (i = 0; i < count; i++) {
2581 remapClassRef(&classrefs[i]);
2582 }
2583 // fixme why doesn't test future1 catch the absence of this?
2584 classrefs = _getObjc2SuperRefs(hi, &count);
2585 for (i = 0; i < count; i++) {
2586 remapClassRef(&classrefs[i]);
2587 }
2588 }
2589 }
2590
2591
2592 // Fix up @selector references
2593 sel_lock();
2594 for (EACH_HEADER) {
2595 if (PrintPreopt) {
2596 if (sel_preoptimizationValid(hi)) {
2597 _objc_inform("PREOPTIMIZATION: honoring preoptimized selectors in %s",
2598 _nameForHeader(hi->mhdr));
2599 }
2600 else if (_objcHeaderOptimizedByDyld(hi)) {
2601 _objc_inform("PREOPTIMIZATION: IGNORING preoptimized selectors in %s",
2602 _nameForHeader(hi->mhdr));
2603 }
2604 }
2605
2606 if (sel_preoptimizationValid(hi)) continue;
2607
2608 SEL *sels = _getObjc2SelectorRefs(hi, &count);
2609 BOOL isBundle = hi->mhdr->filetype == MH_BUNDLE;
2610 for (i = 0; i < count; i++) {
2611 sels[i] = sel_registerNameNoLock((const char *)sels[i], isBundle);
2612 }
2613 }
2614 sel_unlock();
2615
2616 // Discover protocols. Fix up protocol refs.
2617 NXMapTable *protocol_map = protocols();
2618 for (EACH_HEADER) {
2619 extern struct class_t OBJC_CLASS_$_Protocol;
2620 Class cls = (Class)&OBJC_CLASS_$_Protocol;
2621 assert(cls);
2622 protocol_t **protocols = _getObjc2ProtocolList(hi, &count);
2623 // fixme duplicate protocol from bundle
2624 for (i = 0; i < count; i++) {
2625 if (!NXMapGet(protocol_map, protocols[i]->name)) {
2626 protocols[i]->isa = cls;
2627 NXMapKeyCopyingInsert(protocol_map,
2628 protocols[i]->name, protocols[i]);
2629 if (PrintProtocols) {
2630 _objc_inform("PROTOCOLS: protocol at %p is %s",
2631 protocols[i], protocols[i]->name);
2632 }
2633 } else {
2634 if (PrintProtocols) {
2635 _objc_inform("PROTOCOLS: protocol at %p is %s (duplicate)",
2636 protocols[i], protocols[i]->name);
2637 }
2638 }
2639 }
2640 }
2641 for (EACH_HEADER) {
2642 protocol_t **protocols;
2643 protocols = _getObjc2ProtocolRefs(hi, &count);
2644 for (i = 0; i < count; i++) {
2645 remapProtocolRef(&protocols[i]);
2646 }
2647 }
2648
2649 // Realize non-lazy classes (for +load methods and static instances)
2650 for (EACH_HEADER) {
2651 class_t **classlist =
2652 _getObjc2NonlazyClassList(hi, &count);
2653 for (i = 0; i < count; i++) {
2654 realizeClass(remapClass(classlist[i]));
2655 }
2656 }
2657
2658 // Realize newly-resolved future classes, in case CF manipulates them
2659 if (resolvedFutureClasses) {
2660 for (i = 0; i < resolvedFutureClassCount; i++) {
2661 realizeClass(resolvedFutureClasses[i]);
2662 }
2663 _free_internal(resolvedFutureClasses);
2664 }
2665
2666 // Discover categories.
2667 for (EACH_HEADER) {
2668 category_t **catlist =
2669 _getObjc2CategoryList(hi, &count);
2670 for (i = 0; i < count; i++) {
2671 category_t *cat = catlist[i];
2672 // Do NOT use cat->cls! It may have been remapped.
2673 class_t *cls = remapClass(cat->cls);
2674
2675 // Process this category.
2676 // First, register the category with its target class.
2677 // Then, rebuild the class's method lists (etc) if
2678 // the class is realized.
2679 BOOL classExists = NO;
2680 if (cat->instanceMethods || cat->protocols
2681 || cat->instanceProperties)
2682 {
2683 addUnattachedCategoryForClass(cat, cls, hi);
2684 if (isRealized(cls)) {
2685 remethodizeClass(cls);
2686 classExists = YES;
2687 }
2688 if (PrintConnecting) {
2689 _objc_inform("CLASS: found category -%s(%s) %s",
2690 getName(cls), cat->name,
2691 classExists ? "on existing class" : "");
2692 }
2693 }
2694
2695 if (cat->classMethods || cat->protocols
2696 /* || cat->classProperties */)
2697 {
2698 addUnattachedCategoryForClass(cat, cls->isa, hi);
2699 if (isRealized(cls->isa)) {
2700 remethodizeClass(cls->isa);
2701 }
2702 if (PrintConnecting) {
2703 _objc_inform("CLASS: found category +%s(%s)",
2704 getName(cls), cat->name);
2705 }
2706 }
2707 }
2708 }
2709
2710 // Category discovery MUST BE LAST to avoid potential races
2711 // when other threads call the new category code before
2712 // this thread finishes its fixups.
2713
2714 // +load handled by prepare_load_methods()
2715
2716 #undef EACH_HEADER
2717 }
2718
2719
2720 /***********************************************************************
2721 * prepare_load_methods
2722 * Schedule +load for classes in this image, any un-+load-ed
2723 * superclasses in other images, and any categories in this image.
2724 **********************************************************************/
2725 // Recursively schedule +load for cls and any un-+load-ed superclasses.
2726 // cls must already be connected.
2727 static void schedule_class_load(class_t *cls)
2728 {
2729 assert(isRealized(cls)); // _read_images should realize
2730
2731 if (cls->data->flags & RW_LOADED) return;
2732
2733 class_t *supercls = getSuperclass(cls);
2734 if (supercls) schedule_class_load(supercls);
2735
2736 add_class_to_loadable_list((Class)cls);
2737 changeInfo(cls, RW_LOADED, 0);
2738 }
2739
2740 __private_extern__ void prepare_load_methods(header_info *hi)
2741 {
2742 size_t count, i;
2743
2744 rwlock_assert_writing(&runtimeLock);
2745
2746 class_t **classlist =
2747 _getObjc2NonlazyClassList(hi, &count);
2748 for (i = 0; i < count; i++) {
2749 class_t *cls = remapClass(classlist[i]);
2750 schedule_class_load(cls);
2751 }
2752
2753 category_t **categorylist = _getObjc2NonlazyCategoryList(hi, &count);
2754 for (i = 0; i < count; i++) {
2755 category_t *cat = categorylist[i];
2756 // Do NOT use cat->cls! It may have been remapped.
2757 class_t *cls = remapClass(cat->cls);
2758 realizeClass(cls);
2759 assert(isRealized(cls->isa));
2760 add_category_to_loadable_list((Category)cat);
2761 }
2762 }
2763
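/* Ordering note (descriptive only): schedule_class_load() recurses to the
 * superclass before adding cls, so superclasses enter the +load queue ahead
 * of their subclasses; classes from this image are queued before its
 * categories. call_load_methods() later drains class +loads before category
 * +loads, giving the usual order: superclass, then class, then category.
 */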
2764
2765 /***********************************************************************
2766 * _unload_image
2767 * Only handles MH_BUNDLE for now.
2768 * Locking: runtimeLock (write) and loadMethodLock acquired by unmap_image
2769 **********************************************************************/
2770 __private_extern__ void _unload_image(header_info *hi)
2771 {
2772 size_t count, i;
2773
2774 recursive_mutex_assert_locked(&loadMethodLock);
2775 rwlock_assert_writing(&runtimeLock);
2776
2777 // Unload unattached categories and categories waiting for +load.
2778
2779 category_t **catlist = _getObjc2CategoryList(hi, &count);
2780 for (i = 0; i < count; i++) {
2781 category_t *cat = catlist[i];
2782 class_t *cls = remapClass(cat->cls);
2783 // fixme for MH_DYLIB cat's class may have been unloaded already
2784
2785 // unattached list
2786 removeUnattachedCategoryForClass(cat, cls);
2787
2788 // +load queue
2789 remove_category_from_loadable_list((Category)cat);
2790 }
2791
2792 // Unload classes.
2793
2794 class_t **classlist = _getObjc2ClassList(hi, &count);
2795 for (i = 0; i < count; i++) {
2796 class_t *cls = classlist[i];
2797 // fixme remapped classes?
2798 remove_class_from_loadable_list((Class)cls);
2799 unload_class(cls->isa, YES);
2800 unload_class(cls, NO);
2801 }
2802
2803 // Clean up protocols.
2804 #warning fixme protocol unload
2805
2806 // fixme DebugUnload
2807 }
2808
2809
2810 /***********************************************************************
2811 * method_getDescription
2812 * Returns a pointer to this method's objc_method_description.
2813 * Locking: none
2814 **********************************************************************/
2815 struct objc_method_description *
2816 method_getDescription(Method m)
2817 {
2818 if (!m) return NULL;
2819 return (struct objc_method_description *)newmethod(m);
2820 }
2821
2822
2823 /***********************************************************************
2824 * method_getImplementation
2825 * Returns this method's IMP.
2826 * Locking: none
2827 **********************************************************************/
2828 static IMP
2829 _method_getImplementation(method_t *m)
2830 {
2831 if (!m) return NULL;
2832 return m->imp;
2833 }
2834
2835 IMP
2836 method_getImplementation(Method m)
2837 {
2838 return _method_getImplementation(newmethod(m));
2839 }
2840
2841
2842 /***********************************************************************
2843 * method_getName
2844 * Returns this method's selector.
2845 * The method must not be NULL.
2846 * The method must already have been fixed-up.
2847 * Locking: none
2848 **********************************************************************/
2849 SEL
2850 method_getName(Method m_gen)
2851 {
2852 struct method_t *m = newmethod(m_gen);
2853 if (!m) return NULL;
2854
2855 assert((SEL)m->name == sel_registerName((char *)m->name));
2856 return (SEL)m->name;
2857 }
2858
2859
2860 /***********************************************************************
2861 * method_getTypeEncoding
2862 * Returns this method's old-style type encoding string.
2863 * The method must not be NULL.
2864 * Locking: none
2865 **********************************************************************/
2866 const char *
2867 method_getTypeEncoding(Method m)
2868 {
2869 if (!m) return NULL;
2870 return newmethod(m)->types;
2871 }
2872
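/* Usage sketch for the Method accessors above (illustrative; assumes
 * Foundation's NSObject is loaded):
 *
 *     #include <objc/runtime.h>
 *     #include <stdio.h>
 *
 *     static void dump_description_method(void)
 *     {
 *         Class cls = objc_getClass("NSObject");
 *         Method m = class_getInstanceMethod(cls, sel_registerName("description"));
 *         if (m) {
 *             printf("%s has type encoding %s\n",
 *                    sel_getName(method_getName(m)),
 *                    method_getTypeEncoding(m));
 *         }
 *     }
 */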
2873
2874 /***********************************************************************
2875 * method_setImplementation
2876 * Sets this method's implementation to imp.
2877 * The previous implementation is returned.
2878 **********************************************************************/
2879 static IMP
2880 _method_setImplementation(class_t *cls, method_t *m, IMP imp)
2881 {
2882 rwlock_assert_writing(&runtimeLock);
2883
2884 if (!m) return NULL;
2885 if (!imp) return NULL;
2886
2887 if (m->name == (SEL)kIgnore) {
2888 // Ignored methods stay ignored
2889 return m->imp;
2890 }
2891
2892 IMP old = _method_getImplementation(m);
2893 m->imp = imp;
2894
2895 // No cache flushing needed - cache contains Methods not IMPs.
2896
2897 if (vtable_containsSelector(newmethod(m)->name)) {
2898 // Will be slow if cls is NULL (i.e. unknown)
2899 // fixme build list of classes whose Methods are known externally?
2900 flushVtables(cls);
2901 }
2902
2903 // fixme update monomorphism if necessary
2904
2905 return old;
2906 }
2907
2908 IMP
2909 method_setImplementation(Method m, IMP imp)
2910 {
2911 // Don't know the class - will be slow if vtables are affected
2912 // fixme build list of classes whose Methods are known externally?
2913 IMP result;
2914 rwlock_write(&runtimeLock);
2915 result = _method_setImplementation(Nil, newmethod(m), imp);
2916 rwlock_unlock_write(&runtimeLock);
2917 return result;
2918 }
2919
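/* Usage sketch (illustrative only; replacementIMP is a hypothetical function
 * whose signature matches the original method):
 *
 *     Method m = class_getInstanceMethod(objc_getClass("NSObject"),
 *                                        sel_registerName("description"));
 *     IMP previous = method_setImplementation(m, (IMP)replacementIMP);
 *     // 'previous' can be called from replacementIMP to preserve the old
 *     // behavior. Vtables are flushed above if the selector is vtable'd.
 */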
2920
2921 void method_exchangeImplementations(Method m1_gen, Method m2_gen)
2922 {
2923 method_t *m1 = newmethod(m1_gen);
2924 method_t *m2 = newmethod(m2_gen);
2925 if (!m1 || !m2) return;
2926
2927 rwlock_write(&runtimeLock);
2928
2929 if (m1->name == (SEL)kIgnore || m2->name == (SEL)kIgnore) {
2930 // Ignored methods stay ignored. Now they're both ignored.
2931 m1->imp = (IMP)&_objc_ignored_method;
2932 m2->imp = (IMP)&_objc_ignored_method;
2933 rwlock_unlock_write(&runtimeLock);
2934 return;
2935 }
2936
2937 IMP m1_imp = m1->imp;
2938 m1->imp = m2->imp;
2939 m2->imp = m1_imp;
2940
2941 if (vtable_containsSelector(m1->name) ||
2942 vtable_containsSelector(m2->name))
2943 {
2944 // Don't know the class - will be slow if vtables are affected
2945 // fixme build list of classes whose Methods are known externally?
2946 flushVtables(NULL);
2947 }
2948
2949 // fixme update monomorphism if necessary
2950
2951 rwlock_unlock_write(&runtimeLock);
2952 }
2953
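/* Usage sketch (illustrative swizzling pattern; MyClass, original: and
 * replacement: are hypothetical):
 *
 *     Class cls = objc_getClass("MyClass");
 *     Method m1 = class_getInstanceMethod(cls, sel_registerName("original:"));
 *     Method m2 = class_getInstanceMethod(cls, sel_registerName("replacement:"));
 *     if (m1 && m2) method_exchangeImplementations(m1, m2);
 *     // Only the IMPs swap; each method keeps its selector and type string,
 *     // and vtables are rebuilt if either selector is vtable'd.
 */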
2954
2955 /***********************************************************************
2956 * ivar_getOffset
2957 * Returns the offset of the given instance variable.
2958 * Locking: none
2959 **********************************************************************/
2960 ptrdiff_t
2961 ivar_getOffset(Ivar ivar)
2962 {
2963 if (!ivar) return 0;
2964 return *newivar(ivar)->offset;
2965 }
2966
2967
2968 /***********************************************************************
2969 * ivar_getName
2970 * Returns the name of the given instance variable.
2971 * Locking: none
2972 **********************************************************************/
2973 const char *
2974 ivar_getName(Ivar ivar)
2975 {
2976 if (!ivar) return NULL;
2977 return newivar(ivar)->name;
2978 }
2979
2980
2981 /***********************************************************************
2982 * ivar_getTypeEncoding
2983 * Returns the type encoding of the given instance variable.
2984 * Locking: none
2985 **********************************************************************/
2986 const char *
2987 ivar_getTypeEncoding(Ivar ivar)
2988 {
2989 if (!ivar) return NULL;
2990 return newivar(ivar)->type;
2991 }
2992
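/* Usage sketch for the Ivar accessors above (illustrative; uses NSObject's
 * "isa" ivar):
 *
 *     Ivar iv = class_getInstanceVariable(objc_getClass("NSObject"), "isa");
 *     if (iv) {
 *         printf("%s: type %s, offset %td\n",
 *                ivar_getName(iv), ivar_getTypeEncoding(iv), ivar_getOffset(iv));
 *     }
 */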
2993
2994 /***********************************************************************
2995 * _protocol_getMethod_nolock
2996 * Locking: runtimeLock must be write-locked by the caller
2997 **********************************************************************/
2998 static Method
2999 _protocol_getMethod_nolock(protocol_t *proto, SEL sel,
3000 BOOL isRequiredMethod, BOOL isInstanceMethod)
3001 {
3002 rwlock_assert_writing(&runtimeLock);
3003
3004 uint32_t i;
3005 if (!proto || !sel) return NULL;
3006
3007 method_list_t **mlistp = NULL;
3008
3009 if (isRequiredMethod) {
3010 if (isInstanceMethod) {
3011 mlistp = &proto->instanceMethods;
3012 } else {
3013 mlistp = &proto->classMethods;
3014 }
3015 } else {
3016 if (isInstanceMethod) {
3017 mlistp = &proto->optionalInstanceMethods;
3018 } else {
3019 mlistp = &proto->optionalClassMethods;
3020 }
3021 }
3022
3023 if (*mlistp) {
3024 method_list_t *mlist = *mlistp;
3025 if (!isMethodListFixedUp(mlist)) {
3026 mlist = _memdup_internal(mlist, method_list_size(mlist));
3027 fixupMethodList(mlist, YES/*always copy for simplicity*/);
3028 *mlistp = mlist;
3029 }
3030 for (i = 0; i < mlist->count; i++) {
3031 method_t *m = method_list_nth(mlist, i);
3032 if (sel == m->name) return (Method)m;
3033 }
3034 }
3035
3036 if (proto->protocols) {
3037 Method m;
3038 for (i = 0; i < proto->protocols->count; i++) {
3039 protocol_t *realProto = remapProtocol(proto->protocols->list[i]);
3040 m = _protocol_getMethod_nolock(realProto, sel,
3041 isRequiredMethod, isInstanceMethod);
3042 if (m) return m;
3043 }
3044 }
3045
3046 return NULL;
3047 }
3048
3049
3050 /***********************************************************************
3051 * _protocol_getMethod
3052 * Returns the protocol's method for sel, searching incorporated protocols too.
3053 * Locking: write-locks runtimeLock
3054 **********************************************************************/
3055 __private_extern__ Method
3056 _protocol_getMethod(Protocol *p, SEL sel, BOOL isRequiredMethod, BOOL isInstanceMethod)
3057 {
3058 rwlock_write(&runtimeLock);
3059 Method result = _protocol_getMethod_nolock(newprotocol(p), sel,
3060 isRequiredMethod,
3061 isInstanceMethod);
3062 rwlock_unlock_write(&runtimeLock);
3063 return result;
3064 }
3065
3066
3067 /***********************************************************************
3068 * protocol_getName
3069 * Returns the name of the given protocol.
3070 * Locking: runtimeLock must not be held by the caller
3071 **********************************************************************/
3072 const char *
3073 protocol_getName(Protocol *proto)
3074 {
3075 return newprotocol(proto)->name;
3076 }
3077
3078
3079 /***********************************************************************
3080 * protocol_getMethodDescription
3081 * Returns the description of a protocol's named method.
3082 * Locking: runtimeLock must not be held by the caller
3083 **********************************************************************/
3084 struct objc_method_description
3085 protocol_getMethodDescription(Protocol *p, SEL aSel,
3086 BOOL isRequiredMethod, BOOL isInstanceMethod)
3087 {
3088 Method m =
3089 _protocol_getMethod(p, aSel, isRequiredMethod, isInstanceMethod);
3090 if (m) return *method_getDescription(m);
3091 else return (struct objc_method_description){NULL, NULL};
3092 }
3093
3094
3095 /***********************************************************************
3096 * _protocol_conformsToProtocol_nolock
3097 * Returns YES if self conforms to other.
3098 * Locking: runtimeLock must be held by the caller.
3099 **********************************************************************/
3100 static BOOL _protocol_conformsToProtocol_nolock(protocol_t *self, protocol_t *other)
3101 {
3102 if (!self || !other) {
3103 return NO;
3104 }
3105
3106 if (0 == strcmp(self->name, other->name)) {
3107 return YES;
3108 }
3109
3110 if (self->protocols) {
3111 int i;
3112 for (i = 0; i < self->protocols->count; i++) {
3113 protocol_t *proto = remapProtocol(self->protocols->list[i]);
3114 if (0 == strcmp(other->name, proto->name)) {
3115 return YES;
3116 }
3117 if (_protocol_conformsToProtocol_nolock(proto, other)) {
3118 return YES;
3119 }
3120 }
3121 }
3122
3123 return NO;
3124 }
3125
3126
3127 /***********************************************************************
3128 * protocol_conformsToProtocol
3129 * Returns YES if self conforms to other.
3130 * Locking: acquires runtimeLock
3131 **********************************************************************/
3132 BOOL protocol_conformsToProtocol(Protocol *self, Protocol *other)
3133 {
3134 BOOL result;
3135 rwlock_read(&runtimeLock);
3136 result = _protocol_conformsToProtocol_nolock(newprotocol(self),
3137 newprotocol(other));
3138 rwlock_unlock_read(&runtimeLock);
3139 return result;
3140 }
3141
3142
3143 /***********************************************************************
3144 * protocol_isEqual
3145 * Return YES if two protocols are equal (i.e. conform to each other)
3146 * Locking: acquires runtimeLock
3147 **********************************************************************/
3148 BOOL protocol_isEqual(Protocol *self, Protocol *other)
3149 {
3150 if (self == other) return YES;
3151 if (!self || !other) return NO;
3152
3153 if (!protocol_conformsToProtocol(self, other)) return NO;
3154 if (!protocol_conformsToProtocol(other, self)) return NO;
3155
3156 return YES;
3157 }
3158
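/* Usage sketch (illustrative; MyDataSource is a hypothetical protocol):
 *
 *     Protocol *mine = objc_getProtocol("MyDataSource");
 *     Protocol *nsobject = objc_getProtocol("NSObject");
 *     if (mine && nsobject && protocol_conformsToProtocol(mine, nsobject)) {
 *         // MyDataSource is, or transitively incorporates, <NSObject>
 *     }
 */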
3159
3160 /***********************************************************************
3161 * protocol_copyMethodDescriptionList
3162 * Returns descriptions of a protocol's methods.
3163 * Locking: acquires runtimeLock
3164 **********************************************************************/
3165 struct objc_method_description *
3166 protocol_copyMethodDescriptionList(Protocol *p,
3167 BOOL isRequiredMethod,BOOL isInstanceMethod,
3168 unsigned int *outCount)
3169 {
3170 struct protocol_t *proto = newprotocol(p);
3171 struct objc_method_description *result = NULL;
3172 unsigned int count = 0;
3173
3174 if (!proto) {
3175 if (outCount) *outCount = 0;
3176 return NULL;
3177 }
3178
3179 rwlock_read(&runtimeLock);
3180
3181 method_list_t *mlist = NULL;
3182
3183 if (isRequiredMethod) {
3184 if (isInstanceMethod) {
3185 mlist = proto->instanceMethods;
3186 } else {
3187 mlist = proto->classMethods;
3188 }
3189 } else {
3190 if (isInstanceMethod) {
3191 mlist = proto->optionalInstanceMethods;
3192 } else {
3193 mlist = proto->optionalClassMethods;
3194 }
3195 }
3196
3197 if (mlist) {
3198 unsigned int i;
3199 count = mlist->count;
3200 result = calloc(count + 1, sizeof(struct objc_method_description));
3201 for (i = 0; i < count; i++) {
3202 method_t *m = method_list_nth(mlist, i);
3203 result[i].name = sel_registerName((const char *)m->name);
3204 result[i].types = (char *)m->types;
3205 }
3206 }
3207
3208 rwlock_unlock_read(&runtimeLock);
3209
3210 if (outCount) *outCount = count;
3211 return result;
3212 }
3213
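/* Usage sketch (illustrative; lists a protocol's required instance methods):
 *
 *     unsigned int n, i;
 *     struct objc_method_description *descs =
 *         protocol_copyMethodDescriptionList(objc_getProtocol("NSObject"),
 *                                            YES, YES, &n);  // required, instance
 *     for (i = 0; i < n; i++) {
 *         printf("%s %s\n", sel_getName(descs[i].name), descs[i].types);
 *     }
 *     free(descs);  // caller owns the calloc'd block (free(NULL) is harmless)
 */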
3214
3215 /***********************************************************************
3216 * protocol_getProperty
3217 * Looks up a property declared by a protocol or its incorporated protocols.
3218 * Locking: acquires runtimeLock
3219 **********************************************************************/
3220 static Property
3221 _protocol_getProperty_nolock(protocol_t *proto, const char *name,
3222 BOOL isRequiredProperty, BOOL isInstanceProperty)
3223 {
3224 if (!isRequiredProperty || !isInstanceProperty) {
3225 // Only required instance properties are currently supported
3226 return NULL;
3227 }
3228
3229 struct objc_property_list *plist;
3230 if ((plist = proto->instanceProperties)) {
3231 uint32_t i;
3232 for (i = 0; i < plist->count; i++) {
3233 Property prop = property_list_nth(plist, i);
3234 if (0 == strcmp(name, prop->name)) {
3235 return prop;
3236 }
3237 }
3238 }
3239
3240 if (proto->protocols) {
3241 uintptr_t i;
3242 for (i = 0; i < proto->protocols->count; i++) {
3243 protocol_t *p = remapProtocol(proto->protocols->list[i]);
3244 Property prop =
3245 _protocol_getProperty_nolock(p, name,
3246 isRequiredProperty,
3247 isInstanceProperty);
3248 if (prop) return prop;
3249 }
3250 }
3251
3252 return NULL;
3253 }
3254
3255 Property protocol_getProperty(Protocol *p, const char *name,
3256 BOOL isRequiredProperty, BOOL isInstanceProperty)
3257 {
3258 Property result;
3259
3260 if (!p || !name) return NULL;
3261
3262 rwlock_read(&runtimeLock);
3263 result = _protocol_getProperty_nolock(newprotocol(p), name,
3264 isRequiredProperty,
3265 isInstanceProperty);
3266 rwlock_unlock_read(&runtimeLock);
3267
3268 return result;
3269 }
3270
3271
3272 /***********************************************************************
3273 * protocol_copyPropertyList
3274 * Copies this protocol's declared instance properties.
3275 * Locking: acquires runtimeLock
3276 **********************************************************************/
3277 Property *protocol_copyPropertyList(Protocol *proto, unsigned int *outCount)
3278 {
3279 Property *result = NULL;
3280
3281 if (!proto) {
3282 if (outCount) *outCount = 0;
3283 return NULL;
3284 }
3285
3286 rwlock_read(&runtimeLock);
3287
3288 struct objc_property_list *plist = newprotocol(proto)->instanceProperties;
3289 result = copyPropertyList(plist, outCount);
3290
3291 rwlock_unlock_read(&runtimeLock);
3292
3293 return result;
3294 }
3295
3296
3297 /***********************************************************************
3298 * protocol_copyProtocolList
3299 * Copies this protocol's incorporated protocols.
3300 * Does not copy those protocols' incorporated protocols in turn.
3301 * Locking: acquires runtimeLock
3302 **********************************************************************/
3303 Protocol **protocol_copyProtocolList(Protocol *p, unsigned int *outCount)
3304 {
3305 unsigned int count = 0;
3306 Protocol **result = NULL;
3307 protocol_t *proto = newprotocol(p);
3308
3309 if (!proto) {
3310 if (outCount) *outCount = 0;
3311 return NULL;
3312 }
3313
3314 rwlock_read(&runtimeLock);
3315
3316 if (proto->protocols) {
3317 count = (unsigned int)proto->protocols->count;
3318 }
3319 if (count > 0) {
3320 result = malloc((count+1) * sizeof(Protocol *));
3321
3322 unsigned int i;
3323 for (i = 0; i < count; i++) {
3324 result[i] = (Protocol *)remapProtocol(proto->protocols->list[i]);
3325 }
3326 result[i] = NULL;
3327 }
3328
3329 rwlock_unlock_read(&runtimeLock);
3330
3331 if (outCount) *outCount = count;
3332 return result;
3333 }
3334
3335
3336 /***********************************************************************
3337 * objc_getClassList
3338 * Returns pointers to all classes.
3339 * This requires all classes be realized, which is regrettably non-lazy.
3340 * Locking: acquires runtimeLock
3341 **********************************************************************/
3342 int
3343 objc_getClassList(Class *buffer, int bufferLen)
3344 {
3345 rwlock_write(&runtimeLock);
3346
3347 realizeAllClasses();
3348
3349 int count;
3350 class_t *cls;
3351 NXHashState state;
3352 NXHashTable *classes = realizedClasses();
3353 int allCount = NXCountHashTable(classes);
3354
3355 if (!buffer) {
3356 rwlock_unlock_write(&runtimeLock);
3357 return allCount;
3358 }
3359
3360 count = 0;
3361 state = NXInitHashState(classes);
3362 while (count < bufferLen &&
3363 NXNextHashState(classes, &state, (void **)&cls))
3364 {
3365 buffer[count++] = (Class)cls;
3366 }
3367
3368 rwlock_unlock_write(&runtimeLock);
3369
3370 return allCount;
3371 }
3372
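/* Usage sketch (illustrative; the classic two-call pattern):
 *
 *     int i, filled;
 *     int total = objc_getClassList(NULL, 0);          // count only
 *     if (total > 0) {
 *         Class *buf = malloc(total * sizeof(Class));
 *         filled = objc_getClassList(buf, total);
 *         if (filled > total) filled = total;          // classes added meanwhile
 *         for (i = 0; i < filled; i++) printf("%s\n", class_getName(buf[i]));
 *         free(buf);
 *     }
 */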
3373
3374 /***********************************************************************
3375 * objc_copyProtocolList
3376 * Returns pointers to all protocols.
3377 * Locking: read-locks runtimeLock
3378 **********************************************************************/
3379 Protocol **
3380 objc_copyProtocolList(unsigned int *outCount)
3381 {
3382 rwlock_read(&runtimeLock);
3383
3384 int count, i;
3385 Protocol *proto;
3386 const char *name;
3387 NXMapState state;
3388 NXMapTable *protocol_map = protocols();
3389 Protocol **result;
3390
3391 count = NXCountMapTable(protocol_map);
3392 if (count == 0) {
3393 rwlock_unlock_read(&runtimeLock);
3394 if (outCount) *outCount = 0;
3395 return NULL;
3396 }
3397
3398 result = calloc(1 + count, sizeof(Protocol *));
3399
3400 i = 0;
3401 state = NXInitMapState(protocol_map);
3402 while (NXNextMapState(protocol_map, &state,
3403 (const void **)&name, (const void **)&proto))
3404 {
3405 result[i++] = proto;
3406 }
3407
3408 result[i++] = NULL;
3409 assert(i == count+1);
3410
3411 rwlock_unlock_read(&runtimeLock);
3412
3413 if (outCount) *outCount = count;
3414 return result;
3415 }
3416
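/* Usage sketch (illustrative):
 *
 *     unsigned int n, i;
 *     Protocol **protos = objc_copyProtocolList(&n);
 *     for (i = 0; i < n; i++) printf("%s\n", protocol_getName(protos[i]));
 *     free(protos);  // caller owns the calloc'd, NULL-terminated block
 */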
3417
3418 /***********************************************************************
3419 * objc_getProtocol
3420 * Get a protocol by name, or return NULL
3421 * Locking: read-locks runtimeLock
3422 **********************************************************************/
3423 Protocol *objc_getProtocol(const char *name)
3424 {
3425 rwlock_read(&runtimeLock);
3426 Protocol *result = (Protocol *)NXMapGet(protocols(), name);
3427 rwlock_unlock_read(&runtimeLock);
3428 return result;
3429 }
3430
3431
3432 /***********************************************************************
3433 * class_copyMethodList
3434 * Copies the class's method list into a malloc'd, NULL-terminated array.
3435 * Locking: read-locks runtimeLock
3436 **********************************************************************/
3437 Method *
3438 class_copyMethodList(Class cls_gen, unsigned int *outCount)
3439 {
3440 struct class_t *cls = newcls(cls_gen);
3441 unsigned int count = 0;
3442 Method *result = NULL;
3443
3444 if (!cls) {
3445 if (outCount) *outCount = 0;
3446 return NULL;
3447 }
3448
3449 rwlock_read(&runtimeLock);
3450
3451 assert(isRealized(cls));
3452
3453 FOREACH_METHOD_LIST(mlist, cls, {
3454 count += mlist->count;
3455 });
3456
3457 if (count > 0) {
3458 unsigned int m;
3459 result = malloc((count + 1) * sizeof(Method));
3460
3461 m = 0;
3462 FOREACH_METHOD_LIST(mlist, cls, {
3463 unsigned int i;
3464 for (i = 0; i < mlist->count; i++) {
3465 Method aMethod = (Method)method_list_nth(mlist, i);
3466 if (method_getName(aMethod) == (SEL)kIgnore) {
3467 count--;
3468 continue;
3469 }
3470 result[m++] = aMethod;
3471 }
3472 });
3473 result[m] = NULL;
3474 }
3475
3476 rwlock_unlock_read(&runtimeLock);
3477
3478 if (outCount) *outCount = count;
3479 return result;
3480 }
3481
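/* Usage sketch (illustrative; assumes Foundation's NSObject is loaded):
 *
 *     unsigned int n, i;
 *     Method *methods = class_copyMethodList(objc_getClass("NSObject"), &n);
 *     for (i = 0; i < n; i++) {
 *         printf("-[NSObject %s]\n", sel_getName(method_getName(methods[i])));
 *     }
 *     free(methods);  // caller owns the malloc'd, NULL-terminated block
 */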
3482
3483 /***********************************************************************
3484 * class_copyIvarList
3485 * Copies the class's ivar list into a malloc'd, NULL-terminated array.
3486 * Locking: read-locks runtimeLock
3487 **********************************************************************/
3488 Ivar *
3489 class_copyIvarList(Class cls_gen, unsigned int *outCount)
3490 {
3491 struct class_t *cls = newcls(cls_gen);
3492 const ivar_list_t *ivars;
3493 Ivar *result = NULL;
3494 unsigned int count = 0;
3495 unsigned int i;
3496
3497 if (!cls) {
3498 if (outCount) *outCount = 0;
3499 return NULL;
3500 }
3501
3502 rwlock_read(&runtimeLock);
3503
3504 assert(isRealized(cls));
3505
3506 if ((ivars = cls->data->ro->ivars) && ivars->count) {
3507 result = malloc((ivars->count+1) * sizeof(Ivar));
3508
3509 for (i = 0; i < ivars->count; i++) {
3510 ivar_t *ivar = ivar_list_nth(ivars, i);
3511 if (!ivar->offset) continue; // anonymous bitfield
3512 result[count++] = (Ivar)ivar;
3513 }
3514 result[count] = NULL;
3515 }
3516
3517 rwlock_unlock_read(&runtimeLock);
3518
3519 if (outCount) *outCount = count;
3520 return result;
3521 }
3522
3523
3524 /***********************************************************************
3525 * class_copyPropertyList. Returns a heap block containing the
3526 * properties declared in the class, or NULL if the class
3527 * declares no properties. Caller must free the block.
3528 * Does not copy any superclass's properties.
3529 * Locking: read-locks runtimeLock
3530 **********************************************************************/
3531 Property *
3532 class_copyPropertyList(Class cls_gen, unsigned int *outCount)
3533 {
3534 struct class_t *cls = newcls(cls_gen);
3535 chained_property_list *plist;
3536 unsigned int count = 0;
3537 Property *result = NULL;
3538
3539 if (!cls) {
3540 if (outCount) *outCount = 0;
3541 return NULL;
3542 }
3543
3544 rwlock_read(&runtimeLock);
3545
3546 assert(isRealized(cls));
3547
3548 for (plist = cls->data->properties; plist; plist = plist->next) {
3549 count += plist->count;
3550 }
3551
3552 if (count > 0) {
3553 unsigned int p;
3554 result = malloc((count + 1) * sizeof(Property));
3555
3556 p = 0;
3557 for (plist = cls->data->properties; plist; plist = plist->next) {
3558 unsigned int i;
3559 for (i = 0; i < plist->count; i++) {
3560 result[p++] = (Property)&plist->list[i];
3561 }
3562 }
3563 result[p] = NULL;
3564 }
3565
3566 rwlock_unlock_read(&runtimeLock);
3567
3568 if (outCount) *outCount = count;
3569 return result;
3570 }
3571
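/* Usage sketch (illustrative; MyClass is a hypothetical class that declares
 * properties):
 *
 *     unsigned int n, i;
 *     Property *props = class_copyPropertyList(objc_getClass("MyClass"), &n);
 *     for (i = 0; i < n; i++) {
 *         printf("%s  %s\n", property_getName(props[i]),
 *                property_getAttributes(props[i]));
 *     }
 *     free(props);  // caller owns the malloc'd, NULL-terminated block
 */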
3572
3573 /***********************************************************************
3574 * _class_getLoadMethod
3575 * Returns cls's +load implementation, or NULL if it has none.
3576 * Called only from add_class_to_loadable_list.
3577 * Locking: runtimeLock must be read- or write-locked by the caller.
3578 **********************************************************************/
3579 __private_extern__ IMP
3580 _class_getLoadMethod(Class cls_gen)
3581 {
3582 rwlock_assert_locked(&runtimeLock);
3583
3584 struct class_t *cls = newcls(cls_gen);
3585 const method_list_t *mlist;
3586 int i;
3587
3588 assert(isRealized(cls));
3589 assert(isRealized(cls->isa));
3590 assert(!isMetaClass(cls));
3591 assert(isMetaClass(cls->isa));
3592
3593 mlist = cls->isa->data->ro->baseMethods;
3594 if (mlist) for (i = 0; i < mlist->count; i++) {
3595 method_t *m = method_list_nth(mlist, i);
3596 if (0 == strcmp((const char *)m->name, "load")) {
3597 return m->imp;
3598 }
3599 }
3600
3601 return NULL;
3602 }
3603
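/***********************************************************************
* Editorial note on the lookup above: +load is found by a literal scan
* of the metaclass's base method list, not by messaging, so a +load
* inherited from a superclass is never returned here. A class opts in
* simply by defining the method itself, e.g. (hypothetical class):
*
*   @implementation MyClass
*   + (void)load {
*       // runs once, when the image containing MyClass is loaded
*   }
*   @end
**********************************************************************/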
3604
3605 /***********************************************************************
3606 * _category_getName
3607 * Returns a category's name.
3608 * Locking: none
3609 **********************************************************************/
3610 __private_extern__ const char *
3611 _category_getName(Category cat)
3612 {
3613 return newcategory(cat)->name;
3614 }
3615
3616
3617 /***********************************************************************
3618 * _category_getClassName
3619 * Returns a category's class's name
3620 * Called only from add_category_to_loadable_list and
3621 * remove_category_from_loadable_list.
3622 * Locking: runtimeLock must be read- or write-locked by the caller
3623 **********************************************************************/
3624 __private_extern__ const char *
3625 _category_getClassName(Category cat)
3626 {
3627 rwlock_assert_locked(&runtimeLock);
3628 // cat->cls may have been remapped
3629 return getName(remapClass(newcategory(cat)->cls));
3630 }
3631
3632
3633 /***********************************************************************
3634 * _category_getClass
3635 * Returns a category's class
3636 * Called only by call_category_loads.
3637 * Locking: read-locks runtimeLock
3638 **********************************************************************/
3639 __private_extern__ Class
3640 _category_getClass(Category cat)
3641 {
3642 rwlock_read(&runtimeLock);
3643 // cat->cls may have been remapped
3644 struct class_t *result = remapClass(newcategory(cat)->cls);
3645 assert(isRealized(result)); // ok for call_category_loads' usage
3646 rwlock_unlock_read(&runtimeLock);
3647 return (Class)result;
3648 }
3649
3650
3651 /***********************************************************************
3652 * _category_getLoadMethod
3653 * Returns the IMP of the category's +load method, or NULL if it has none.
3654 * Called only from add_category_to_loadable_list
3655 * Locking: runtimeLock must be read- or write-locked by the caller
3656 **********************************************************************/
3657 __private_extern__ IMP
3658 _category_getLoadMethod(Category cat)
3659 {
3660 rwlock_assert_locked(&runtimeLock);
3661
3662 const method_list_t *mlist;
3663 int i;
3664
3665 mlist = newcategory(cat)->classMethods;
3666 if (mlist) for (i = 0; i < mlist->count; i++) {
3667 method_t *m = method_list_nth(mlist, i);
3668 if (0 == strcmp((const char *)m->name, "load")) {
3669 return m->imp;
3670 }
3671 }
3672
3673 return NULL;
3674 }
3675
3676
3677 /***********************************************************************
3678 * class_copyProtocolList
3679 * Returns the protocols adopted by the class itself as a NULL-terminated malloc'd block, or NULL; caller frees.
3680 * Locking: read-locks runtimeLock
3681 **********************************************************************/
3682 Protocol **
3683 class_copyProtocolList(Class cls_gen, unsigned int *outCount)
3684 {
3685 struct class_t *cls = newcls(cls_gen);
3686 Protocol **r;
3687 struct protocol_list_t **p;
3688 unsigned int count = 0;
3689 unsigned int i;
3690 Protocol **result = NULL;
3691
3692 if (!cls) {
3693 if (outCount) *outCount = 0;
3694 return NULL;
3695 }
3696
3697 rwlock_read(&runtimeLock);
3698
3699 assert(isRealized(cls));
3700
3701 for (p = cls->data->protocols; p && *p; p++) {
3702 count += (uint32_t)(*p)->count;
3703 }
3704
3705 if (count) {
3706 result = malloc((count+1) * sizeof(Protocol *));
3707 r = result;
3708 for (p = cls->data->protocols; p && *p; p++) {
3709 for (i = 0; i < (*p)->count; i++) {
3710 *r++ = (Protocol *)remapProtocol((*p)->list[i]);
3711 }
3712 }
3713 *r++ = NULL;
3714 }
3715
3716 rwlock_unlock_read(&runtimeLock);
3717
3718 if (outCount) *outCount = count;
3719 return result;
3720 }
3721
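/***********************************************************************
* Editorial usage sketch for class_copyProtocolList (not part of the
* original source; cls stands for any realized class). The block is
* NULL-terminated and malloc'd.
*
*   unsigned int count;
*   Protocol **protos = class_copyProtocolList(cls, &count);
*   for (unsigned int i = 0; i < count; i++) {
*       printf("conforms to <%s>\n", protocol_getName(protos[i]));
*   }
*   free(protos);
**********************************************************************/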
3722
3723 /***********************************************************************
3724 * _objc_copyClassNamesForImage
3725 * fixme
3726 * Locking: read-locks runtimeLock
3727 **********************************************************************/
3728 __private_extern__ const char **
3729 _objc_copyClassNamesForImage(header_info *hi, unsigned int *outCount)
3730 {
3731 size_t count, i;
3732 class_t **classlist;
3733 const char **names;
3734
3735 rwlock_read(&runtimeLock);
3736
3737 classlist = _getObjc2ClassList(hi, &count);
3738 names = malloc((count+1) * sizeof(const char *));
3739
3740 for (i = 0; i < count; i++) {
3741 names[i] = getName(classlist[i]);
3742 }
3743 names[count] = NULL;
3744
3745 rwlock_unlock_read(&runtimeLock);
3746
3747 if (outCount) *outCount = (unsigned int)count;
3748 return names;
3749 }
3750
3751
3752 /***********************************************************************
3753 * _class_getCache
3754 * fixme
3755 * Locking: none
3756 **********************************************************************/
3757 __private_extern__ Cache
3758 _class_getCache(Class cls)
3759 {
3760 return newcls(cls)->cache;
3761 }
3762
3763
3764 /***********************************************************************
3765 * _class_getInstanceSize
3766 * fixme
3767 * Locking: none
3768 **********************************************************************/
3769 __private_extern__ size_t
3770 _class_getInstanceSize(Class cls)
3771 {
3772 if (!cls) return 0;
3773 return instanceSize(newcls(cls));
3774 }
3775
3776 static uint32_t
3777 instanceSize(struct class_t *cls)
3778 {
3779 assert(cls);
3780 assert(isRealized(cls));
3781 // fixme rdar://5244378
3782 return (uint32_t)((cls->data->ro->instanceSize + WORD_MASK) & ~WORD_MASK);
3783 }
3784
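/***********************************************************************
* Editorial note: the return statement above is the usual
* align-up-by-mask idiom. With 8-byte words (mask 7), a recorded
* instance size of 13 rounds up as
*
*   (13 + 7) & ~7  ==  20 & ~7  ==  16
*
* so every size handed to the allocator is a whole number of words.
**********************************************************************/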
3785
3786 /***********************************************************************
3787 * class_getVersion
3788 * fixme
3789 * Locking: none
3790 **********************************************************************/
3791 int
3792 class_getVersion(Class cls)
3793 {
3794 if (!cls) return 0;
3795 assert(isRealized(newcls(cls)));
3796 return newcls(cls)->data->version;
3797 }
3798
3799
3800 /***********************************************************************
3801 * _class_setCache
3802 * fixme
3803 * Locking: none
3804 **********************************************************************/
3805 __private_extern__ void
3806 _class_setCache(Class cls, Cache cache)
3807 {
3808 newcls(cls)->cache = cache;
3809 }
3810
3811
3812 /***********************************************************************
3813 * class_setVersion
3814 * fixme
3815 * Locking: none
3816 **********************************************************************/
3817 void
3818 class_setVersion(Class cls, int version)
3819 {
3820 if (!cls) return;
3821 assert(isRealized(newcls(cls)));
3822 newcls(cls)->data->version = version;
3823 }
3824
3825
3826 /***********************************************************************
3827 * _class_getName
3828 * fixme
3829 * Locking: acquires runtimeLock
3830 **********************************************************************/
3831 __private_extern__ const char *_class_getName(Class cls)
3832 {
3833 if (!cls) return "nil";
3834 // fixme hack rwlock_write(&runtimeLock);
3835 const char *name = getName(newcls(cls));
3836 // rwlock_unlock_write(&runtimeLock);
3837 return name;
3838 }
3839
3840
3841 /***********************************************************************
3842 * getName
3843 * fixme
3844 * Locking: runtimeLock must be held by the caller
3845 **********************************************************************/
3846 static const char *
3847 getName(struct class_t *cls)
3848 {
3849 // fixme hack rwlock_assert_writing(&runtimeLock);
3850 assert(cls);
3851
3852 if (isRealized(cls)) {
3853 return cls->data->ro->name;
3854 } else {
3855 return ((const struct class_ro_t *)cls->data)->name;
3856 }
3857 }
3858
3859
3860 /***********************************************************************
3861 * getMethodNoSuper_nolock
3862 * fixme
3863 * Locking: runtimeLock must be read- or write-locked by the caller
3864 **********************************************************************/
3865 static method_t *
3866 getMethodNoSuper_nolock(struct class_t *cls, SEL sel)
3867 {
3868 rwlock_assert_locked(&runtimeLock);
3869
3870 uint32_t i;
3871
3872 assert(isRealized(cls));
3873 // fixme nil cls?
3874 // fixme NULL sel?
3875
3876 FOREACH_METHOD_LIST(mlist, cls, {
3877 for (i = 0; i < mlist->count; i++) {
3878 method_t *m = method_list_nth(mlist, i);
3879 if (m->name == sel) return m;
3880 }
3881 });
3882
3883 return NULL;
3884 }
3885
3886
3887 /***********************************************************************
3888 * _class_getMethodNoSuper
3889 * fixme
3890 * Locking: read-locks runtimeLock
3891 **********************************************************************/
3892 __private_extern__ Method
3893 _class_getMethodNoSuper(Class cls, SEL sel)
3894 {
3895 rwlock_read(&runtimeLock);
3896 Method result = (Method)getMethodNoSuper_nolock(newcls(cls), sel);
3897 rwlock_unlock_read(&runtimeLock);
3898 return result;
3899 }
3900
3901 /***********************************************************************
3902 * _class_getMethodNoSuper_nolock
3903 * For use inside lockForMethodLookup() only.
3904 * Locking: runtimeLock must be read-locked by the caller
3905 **********************************************************************/
3906 __private_extern__ Method
3907 _class_getMethodNoSuper_nolock(Class cls, SEL sel)
3908 {
3909 return (Method)getMethodNoSuper_nolock(newcls(cls), sel);
3910 }
3911
3912
3913 /***********************************************************************
3914 * getMethod_nolock
3915 * fixme
3916 * Locking: runtimeLock must be read- or write-locked by the caller
3917 **********************************************************************/
3918 static method_t *
3919 getMethod_nolock(class_t *cls, SEL sel)
3920 {
3921 method_t *m = NULL;
3922
3923 rwlock_assert_locked(&runtimeLock);
3924
3925 // fixme nil cls?
3926 // fixme NULL sel?
3927
3928 assert(isRealized(cls));
3929
3930 while (cls && ((m = getMethodNoSuper_nolock(cls, sel))) == NULL) {
3931 cls = getSuperclass(cls);
3932 }
3933
3934 return m;
3935 }
3936
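/***********************************************************************
* Editorial sketch of the same search shape using only public API (not
* part of the original source): look in each class's own method list,
* then move to its superclass, exactly as the loop above does with
* getMethodNoSuper_nolock.
*
*   static Method findMethod(Class cls, SEL sel)
*   {
*       for ( ; cls; cls = class_getSuperclass(cls)) {
*           unsigned int n;
*           Method *mlist = class_copyMethodList(cls, &n);
*           Method found = NULL;
*           for (unsigned int i = 0; i < n; i++) {
*               if (method_getName(mlist[i]) == sel) { found = mlist[i]; break; }
*           }
*           free(mlist);
*           if (found) return found;
*       }
*       return NULL;
*   }
**********************************************************************/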
3937
3938 /***********************************************************************
3939 * _class_getMethod
3940 * fixme
3941 * Locking: read-locks runtimeLock
3942 **********************************************************************/
3943 __private_extern__ Method _class_getMethod(Class cls, SEL sel)
3944 {
3945 Method m;
3946 rwlock_read(&runtimeLock);
3947 m = (Method)getMethod_nolock(newcls(cls), sel);
3948 rwlock_unlock_read(&runtimeLock);
3949 return m;
3950 }
3951
3952
3953 /***********************************************************************
3954 * ABI-specific lookUpMethod helpers.
3955 * Locking: read- and write-locks runtimeLock.
3956 **********************************************************************/
3957 __private_extern__ void lockForMethodLookup(void)
3958 {
3959 rwlock_read(&runtimeLock);
3960 }
3961 __private_extern__ void unlockForMethodLookup(void)
3962 {
3963 rwlock_unlock_read(&runtimeLock);
3964 }
3965
3966 __private_extern__ IMP prepareForMethodLookup(Class cls, SEL sel, BOOL init)
3967 {
3968 rwlock_assert_unlocked(&runtimeLock);
3969
3970 if (!isRealized(newcls(cls))) {
3971 rwlock_write(&runtimeLock);
3972 realizeClass(newcls(cls));
3973 rwlock_unlock_write(&runtimeLock);
3974 }
3975
3976 if (init && !_class_isInitialized(cls)) {
3977 _class_initialize (cls);
3978 // If sel == initialize, _class_initialize will send +initialize and
3979 // then the messenger will send +initialize again after this
3980 // procedure finishes. Of course, if this is not being called
3981 // from the messenger then it won't happen. 2778172
3982 }
3983
3984 return NULL;
3985 }
3986
3987
3988 /***********************************************************************
3989 * class_getProperty
3990 * Returns the named property from cls or its superclasses, or NULL.
3991 * Locking: read-locks runtimeLock
3992 **********************************************************************/
3993 Property class_getProperty(Class cls_gen, const char *name)
3994 {
3995 Property result = NULL;
3996 chained_property_list *plist;
3997 struct class_t *cls = newcls(cls_gen);
3998
3999 if (!cls || !name) return NULL;
4000
4001 rwlock_read(&runtimeLock);
4002
4003 assert(isRealized(cls));
4004
4005 for ( ; cls; cls = getSuperclass(cls)) {
4006 for (plist = cls->data->properties; plist; plist = plist->next) {
4007 uint32_t i;
4008 for (i = 0; i < plist->count; i++) {
4009 if (0 == strcmp(name, plist->list[i].name)) {
4010 result = &plist->list[i];
4011 goto done;
4012 }
4013 }
4014 }
4015 }
4016
4017 done:
4018 rwlock_unlock_read(&runtimeLock);
4019
4020 return result;
4021 }
4022
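/***********************************************************************
* Editorial usage sketch for class_getProperty (not part of the
* original source; the class and property name are assumptions).
*
*   Property p = class_getProperty(cls, "delegate");
*   if (p) printf("%s -> %s\n", property_getName(p),
*                 property_getAttributes(p));
**********************************************************************/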
4023
4024 /***********************************************************************
4025 * Locking: fixme
4026 **********************************************************************/
4027 __private_extern__ BOOL _class_isMetaClass(Class cls)
4028 {
4029 if (!cls) return NO;
4030 return isMetaClass(newcls(cls));
4031 }
4032
4033 static BOOL
4034 isMetaClass(struct class_t *cls)
4035 {
4036 assert(cls);
4037 assert(isRealized(cls));
4038 return (cls->data->ro->flags & RO_META) ? YES : NO;
4039 }
4040
4041
4042 __private_extern__ Class _class_getMeta(Class cls)
4043 {
4044 assert(cls);
4045 if (isMetaClass(newcls(cls))) return cls;
4046 else return ((id)cls)->isa;
4047 }
4048
4049 Class gdb_class_getClass(Class cls)
4050 {
4051 const char *className = strdup(getName(newcls(cls)));
4052 if (!className) return Nil;
4053 Class rCls = look_up_class(className, NO, NO);
4054 free((char*)className);
4055 return rCls;
4056 }
4057
4058 BOOL gdb_objc_isRuntimeLocked(void)
4059 {
4060 if (rwlock_try_write(&runtimeLock)) {
4061 rwlock_unlock_write(&runtimeLock);
4062 } else
4063 return YES;
4064
4065 if (mutex_try_lock(&cacheUpdateLock)) {
4066 mutex_unlock(&cacheUpdateLock);
4067 } else
4068 return YES;
4069
4070 return NO;
4071 }
4072
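/***********************************************************************
* Editorial note: gdb_objc_isRuntimeLocked probes each lock by trying
* to take it and releasing immediately on success; failure to acquire
* means another thread holds it. The same probe pattern with plain
* pthread primitives, for illustration only:
*
*   static BOOL isLocked(pthread_rwlock_t *rw, pthread_mutex_t *mu)
*   {
*       if (pthread_rwlock_trywrlock(rw) == 0) pthread_rwlock_unlock(rw);
*       else return YES;
*       if (pthread_mutex_trylock(mu) == 0) pthread_mutex_unlock(mu);
*       else return YES;
*       return NO;
*   }
**********************************************************************/
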
4073 /***********************************************************************
4074 * Locking: fixme
4075 **********************************************************************/
4076 __private_extern__ BOOL
4077 _class_isInitializing(Class cls_gen)
4078 {
4079 struct class_t *cls = newcls(_class_getMeta(cls_gen));
4080 return (cls->data->flags & RW_INITIALIZING) ? YES : NO;
4081 }
4082
4083
4084 /***********************************************************************
4085 * Locking: fixme
4086 **********************************************************************/
4087 __private_extern__ BOOL
4088 _class_isInitialized(Class cls_gen)
4089 {
4090 struct class_t *cls = newcls(_class_getMeta(cls_gen));
4091 return (cls->data->flags & RW_INITIALIZED) ? YES : NO;
4092 }
4093
4094
4095 /***********************************************************************
4096 * Locking: fixme
4097 **********************************************************************/
4098 __private_extern__ void
4099 _class_setInitializing(Class cls_gen)
4100 {
4101 struct class_t *cls = newcls(_class_getMeta(cls_gen));
4102 changeInfo(cls, RW_INITIALIZING, 0);
4103 }
4104
4105
4106 /***********************************************************************
4107 * Locking: write-locks runtimeLock
4108 **********************************************************************/
4109 __private_extern__ void
4110 _class_setInitialized(Class cls_gen)
4111 {
4112
4113 struct class_t *metacls;
4114 struct class_t *cls;
4115
4116 rwlock_write(&runtimeLock);
4117 metacls = newcls(_class_getMeta(cls_gen));
4118 cls = getNonMetaClass(metacls);
4119
4120 // Update vtables (initially postponed pending +initialize completion)
4121 // Do cls first because root metacls is a subclass of root cls
4122 updateVtable(cls, YES);
4123 updateVtable(metacls, YES);
4124
4125 rwlock_unlock_write(&runtimeLock);
4126
4127 changeInfo(metacls, RW_INITIALIZED, RW_INITIALIZING);
4128 }
4129
4130
4131 /***********************************************************************
4132 * Locking: fixme
4133 **********************************************************************/
4134 __private_extern__ BOOL
4135 _class_shouldGrowCache(Class cls)
4136 {
4137 return YES; // fixme good or bad for memory use?
4138 }
4139
4140
4141 /***********************************************************************
4142 * Locking: fixme
4143 **********************************************************************/
4144 __private_extern__ void
4145 _class_setGrowCache(Class cls, BOOL grow)
4146 {
4147 // fixme good or bad for memory use?
4148 }
4149
4150
4151 /***********************************************************************
4152 * _class_isLoadable
4153 * fixme
4154 * Locking: none
4155 **********************************************************************/
4156 __private_extern__ BOOL
4157 _class_isLoadable(Class cls)
4158 {
4159 assert(isRealized(newcls(cls)));
4160 return YES; // any class registered for +load is definitely loadable
4161 }
4162
4163
4164 /***********************************************************************
4165 * Locking: fixme
4166 **********************************************************************/
4167 __private_extern__ BOOL
4168 _class_hasCxxStructorsNoSuper(Class cls)
4169 {
4170 assert(isRealized(newcls(cls)));
4171 return (newcls(cls)->data->ro->flags & RO_HAS_CXX_STRUCTORS) ? YES : NO;
4172 }
4173
4174
4175 /***********************************************************************
4176 * Locking: fixme
4177 **********************************************************************/
4178 __private_extern__ BOOL
4179 _class_shouldFinalizeOnMainThread(Class cls)
4180 {
4181 assert(isRealized(newcls(cls)));
4182 return (newcls(cls)->data->flags & RW_FINALIZE_ON_MAIN_THREAD) ? YES : NO;
4183 }
4184
4185
4186 /***********************************************************************
4187 * Locking: fixme
4188 **********************************************************************/
4189 __private_extern__ void
4190 _class_setFinalizeOnMainThread(Class cls)
4191 {
4192 assert(isRealized(newcls(cls)));
4193 changeInfo(newcls(cls), RW_FINALIZE_ON_MAIN_THREAD, 0);
4194 }
4195
4196
4197 /***********************************************************************
4198 * _class_instancesHaveAssociatedObjects
4199 * May manipulate unrealized future classes in the CF-bridged case.
4200 **********************************************************************/
4201 __private_extern__ BOOL
4202 _class_instancesHaveAssociatedObjects(Class cls_gen)
4203 {
4204 class_t *cls = newcls(cls_gen);
4205 assert(isFuture(cls) || isRealized(cls));
4206 return (cls->data->flags & RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS) ? YES : NO;
4207 }
4208
4209
4210 /***********************************************************************
4211 * _class_assertInstancesHaveAssociatedObjects
4212 * May manipulate unrealized future classes in the CF-bridged case.
4213 **********************************************************************/
4214 __private_extern__ void
4215 _class_assertInstancesHaveAssociatedObjects(Class cls_gen)
4216 {
4217 class_t *cls = newcls(cls_gen);
4218 assert(isFuture(cls) || isRealized(cls));
4219 changeInfo(cls, RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS, 0);
4220 }
4221
4222
4223 /***********************************************************************
4224 * Locking: none
4225 * fixme assert realized to get superclass remapping?
4226 **********************************************************************/
4227 __private_extern__ Class
4228 _class_getSuperclass(Class cls)
4229 {
4230 return (Class)getSuperclass(newcls(cls));
4231 }
4232
4233 static struct class_t *
4234 getSuperclass(struct class_t *cls)
4235 {
4236 if (!cls) return NULL;
4237 return cls->superclass;
4238 }
4239
4240
4241 /***********************************************************************
4242 * class_getIvarLayout
4243 * Called by the garbage collector.
4244 * The class must be NULL or already realized.
4245 * Locking: none
4246 **********************************************************************/
4247 const char *
4248 class_getIvarLayout(Class cls_gen)
4249 {
4250 class_t *cls = newcls(cls_gen);
4251 if (cls) return (const char *)cls->data->ro->ivarLayout;
4252 else return NULL;
4253 }
4254
4255
4256 /***********************************************************************
4257 * class_getWeakIvarLayout
4258 * Called by the garbage collector.
4259 * The class must be NULL or already realized.
4260 * Locking: none
4261 **********************************************************************/
4262 const char *
4263 class_getWeakIvarLayout(Class cls_gen)
4264 {
4265 class_t *cls = newcls(cls_gen);
4266 if (cls) return (const char *)cls->data->ro->weakIvarLayout;
4267 else return NULL;
4268 }
4269
4270
4271 /***********************************************************************
4272 * class_setIvarLayout
4273 * Changes the class's GC scan layout.
4274 * NULL layout means no unscanned ivars
4275 * The class must be under construction.
4276 * fixme: sanity-check layout vs instance size?
4277 * fixme: sanity-check layout vs superclass?
4278 * Locking: acquires runtimeLock
4279 **********************************************************************/
4280 void
4281 class_setIvarLayout(Class cls_gen, const char *layout)
4282 {
4283 class_t *cls = newcls(cls_gen);
4284 if (!cls) return;
4285
4286 rwlock_write(&runtimeLock);
4287
4288 // Can only change layout of in-construction classes.
4289 // note: if modifications to post-construction classes were
4290 // allowed, there would be a race below (us vs. concurrent GC scan)
4291 if (!(cls->data->flags & RW_CONSTRUCTING)) {
4292 _objc_inform("*** Can't set ivar layout for already-registered "
4293 "class '%s'", getName(cls));
4294 rwlock_unlock_write(&runtimeLock);
4295 return;
4296 }
4297
4298 class_ro_t *ro_w = make_ro_writeable(cls->data);
4299
4300 try_free(ro_w->ivarLayout);
4301 ro_w->ivarLayout = (unsigned char *)_strdup_internal(layout);
4302
4303 rwlock_unlock_write(&runtimeLock);
4304 }
4305
4306
4307 /***********************************************************************
4308 * class_setWeakIvarLayout
4309 * Changes the class's GC weak layout.
4310 * NULL layout means no weak ivars
4311 * The class must be under construction.
4312 * fixme: sanity-check layout vs instance size?
4313 * fixme: sanity-check layout vs superclass?
4314 * Locking: acquires runtimeLock
4315 **********************************************************************/
4316 void
4317 class_setWeakIvarLayout(Class cls_gen, const char *layout)
4318 {
4319 class_t *cls = newcls(cls_gen);
4320 if (!cls) return;
4321
4322 rwlock_write(&runtimeLock);
4323
4324 // Can only change layout of in-construction classes.
4325 // note: if modifications to post-construction classes were
4326 // allowed, there would be a race below (us vs. concurrent GC scan)
4327 if (!(cls->data->flags & RW_CONSTRUCTING)) {
4328 _objc_inform("*** Can't set weak ivar layout for already-registered "
4329 "class '%s'", getName(cls));
4330 rwlock_unlock_write(&runtimeLock);
4331 return;
4332 }
4333
4334 class_ro_t *ro_w = make_ro_writeable(cls->data);
4335
4336 try_free(ro_w->weakIvarLayout);
4337 ro_w->weakIvarLayout = (unsigned char *)_strdup_internal(layout);
4338
4339 rwlock_unlock_write(&runtimeLock);
4340 }
4341
4342
4343 /***********************************************************************
4344 * _class_getVariable
4345 * fixme
4346 * Locking: read-locks runtimeLock
4347 **********************************************************************/
4348 __private_extern__ Ivar
4349 _class_getVariable(Class cls, const char *name)
4350 {
4351 rwlock_read(&runtimeLock);
4352
4353 for ( ; cls != Nil; cls = class_getSuperclass(cls)) {
4354 struct ivar_t *ivar = getIvar(newcls(cls), name);
4355 if (ivar) {
4356 rwlock_unlock_read(&runtimeLock);
4357 return (Ivar)ivar;
4358 }
4359 }
4360
4361 rwlock_unlock_read(&runtimeLock);
4362
4363 return NULL;
4364 }
4365
4366
4367 /***********************************************************************
4368 * class_conformsToProtocol
4369 * fixme
4370 * Locking: read-locks runtimeLock
4371 **********************************************************************/
4372 BOOL class_conformsToProtocol(Class cls_gen, Protocol *proto)
4373 {
4374 Protocol **protocols;
4375 unsigned int count, i;
4376 BOOL result = NO;
4377
4378 if (!cls_gen) return NO;
4379 if (!proto) return NO;
4380
4381 // fixme null cls?
4382
4383 protocols = class_copyProtocolList(cls_gen, &count);
4384
4385 for (i = 0; i < count; i++) {
4386 if (protocols[i] == proto ||
4387 protocol_conformsToProtocol(protocols[i], proto))
4388 {
4389 result = YES;
4390 break;
4391 }
4392 }
4393
4394 if (protocols) free(protocols);
4395
4396 return result;
4397 }
4398
4399
4400 /***********************************************************************
4401 * class_addMethod
4402 * fixme
4403 * Locking: write-locks runtimeLock
4404 **********************************************************************/
4405 static IMP
4406 _class_addMethod(Class cls_gen, SEL name, IMP imp,
4407 const char *types, BOOL replace)
4408 {
4409 struct class_t *cls = newcls(cls_gen);
4410 IMP result = NULL;
4411
4412 if (!types) types = "";
4413
4414 rwlock_write(&runtimeLock);
4415
4416 assert(isRealized(cls));
4417
4418 method_t *m;
4419 if ((m = getMethodNoSuper_nolock(cls, name))) {
4420 // already exists
4421 if (!replace) {
4422 result = _method_getImplementation(m);
4423 } else {
4424 result = _method_setImplementation(cls, m, imp);
4425 }
4426 } else {
4427 // fixme optimize
4428 method_list_t *newlist;
4429 newlist = _calloc_internal(sizeof(*newlist), 1);
4430 newlist->entsize_NEVER_USE = (uint32_t)sizeof(method_t) | fixed_up_method_list;
4431 newlist->count = 1;
4432 newlist->first.name = name;
4433 newlist->first.types = strdup(types);
4434 if (name != (SEL)kIgnore) {
4435 newlist->first.imp = imp;
4436 } else {
4437 newlist->first.imp = (IMP)&_objc_ignored_method;
4438 }
4439
4440 BOOL vtablesAffected;
4441 attachMethodLists(cls, &newlist, 1, NO, &vtablesAffected);
4442 flushCaches(cls);
4443 if (vtablesAffected) flushVtables(cls);
4444
4445 result = NULL;
4446 }
4447
4448 rwlock_unlock_write(&runtimeLock);
4449
4450 return result;
4451 }
4452
4453
4454 BOOL
4455 class_addMethod(Class cls, SEL name, IMP imp, const char *types)
4456 {
4457 if (!cls) return NO;
4458
4459 IMP old = _class_addMethod(cls, name, imp, types, NO);
4460 return old ? NO : YES;
4461 }
4462
4463
4464 IMP
4465 class_replaceMethod(Class cls, SEL name, IMP imp, const char *types)
4466 {
4467 if (!cls) return NULL;
4468
4469 return _class_addMethod(cls, name, imp, types, YES);
4470 }
4471
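/***********************************************************************
* Editorial usage sketch for class_addMethod / class_replaceMethod (not
* part of the original source; the class, selector, and C function are
* assumptions). "v@:" encodes a void method taking only self and _cmd.
*
*   static void myPing(id self, SEL _cmd) {
*       printf("ping from %s\n", class_getName(object_getClass(self)));
*   }
*
*   // ... then, in setup code for some class cls:
*   SEL sel = sel_registerName("ping");
*   if (!class_addMethod(cls, sel, (IMP)myPing, "v@:")) {
*       // a -ping already exists; swap its implementation instead
*       class_replaceMethod(cls, sel, (IMP)myPing, "v@:");
*   }
**********************************************************************/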
4472
4473 /***********************************************************************
4474 * class_addIvar
4475 * Adds an ivar to a class.
4476 * Locking: acquires runtimeLock
4477 **********************************************************************/
4478 BOOL
4479 class_addIvar(Class cls_gen, const char *name, size_t size,
4480 uint8_t alignment, const char *type)
4481 {
4482 struct class_t *cls = newcls(cls_gen);
4483
4484 if (!cls) return NO;
4485
4486 if (!type) type = "";
4487 if (name && 0 == strcmp(name, "")) name = NULL;
4488
4489 rwlock_write(&runtimeLock);
4490
4491 assert(isRealized(cls));
4492
4493 // No class variables
4494 if (isMetaClass(cls)) {
4495 rwlock_unlock_write(&runtimeLock);
4496 return NO;
4497 }
4498
4499 // Can only add ivars to in-construction classes.
4500 if (!(cls->data->flags & RW_CONSTRUCTING)) {
4501 rwlock_unlock_write(&runtimeLock);
4502 return NO;
4503 }
4504
4505 // Check for existing ivar with this name, unless it's anonymous.
4506 // Check for too-big ivar.
4507 // fixme check for superclass ivar too?
4508 if ((name && getIvar(cls, name)) || size > UINT32_MAX) {
4509 rwlock_unlock_write(&runtimeLock);
4510 return NO;
4511 }
4512
4513 class_ro_t *ro_w = make_ro_writeable(cls->data);
4514
4515 // fixme allocate less memory here
4516
4517 ivar_list_t *oldlist, *newlist;
4518 if ((oldlist = (ivar_list_t *)cls->data->ro->ivars)) {
4519 size_t oldsize = ivar_list_size(oldlist);
4520 newlist = _calloc_internal(oldsize + oldlist->entsize, 1);
4521 memcpy(newlist, oldlist, oldsize);
4522 _free_internal(oldlist);
4523 } else {
4524 newlist = _calloc_internal(sizeof(ivar_list_t), 1);
4525 newlist->entsize = (uint32_t)sizeof(ivar_t);
4526 }
4527
4528 uint32_t offset = instanceSize(cls);
4529 uint32_t alignMask = (1<<alignment)-1;
4530 offset = (offset + alignMask) & ~alignMask;
4531
4532 ivar_t *ivar = ivar_list_nth(newlist, newlist->count++);
4533 ivar->offset = _malloc_internal(sizeof(*ivar->offset));
4534 *ivar->offset = offset;
4535 ivar->name = name ? _strdup_internal(name) : NULL;
4536 ivar->type = _strdup_internal(type);
4537 ivar->alignment = alignment;
4538 ivar->size = (uint32_t)size;
4539
4540 ro_w->ivars = newlist;
4541 ro_w->instanceSize = (uint32_t)(offset + size);
4542
4543 // Ivar layout updated in registerClass.
4544
4545 rwlock_unlock_write(&runtimeLock);
4546
4547 return YES;
4548 }
4549
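/***********************************************************************
* Editorial usage sketch for class_addIvar (not part of the original
* source; the class and ivar names are assumptions). The alignment
* argument is log2 of the byte alignment, and ivars may only be added
* between objc_allocateClassPair and objc_registerClassPair.
*
*   Class cls = objc_allocateClassPair(objc_getClass("NSObject"),
*                                      "MyDynamicClass", 0);
*   class_addIvar(cls, "_count", sizeof(int), 2, @encode(int));
*                                        // alignment 2 == log2(4)
*   objc_registerClassPair(cls);
*   Ivar iv = class_getInstanceVariable(cls, "_count");
*   ptrdiff_t off = ivar_getOffset(iv);  // rounded up per the code above
**********************************************************************/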
4550
4551 /***********************************************************************
4552 * class_addProtocol
4553 * Adds a protocol to a class.
4554 * Locking: acquires runtimeLock
4555 **********************************************************************/
4556 BOOL class_addProtocol(Class cls_gen, Protocol *protocol_gen)
4557 {
4558 class_t *cls = newcls(cls_gen);
4559 protocol_t *protocol = newprotocol(protocol_gen);
4560 protocol_list_t *plist;
4561 protocol_list_t **plistp;
4562
4563 if (!cls) return NO;
4564 if (class_conformsToProtocol(cls_gen, protocol_gen)) return NO;
4565
4566 rwlock_write(&runtimeLock);
4567
4568 assert(isRealized(cls));
4569
4570 // fixme optimize
4571 plist = _malloc_internal(sizeof(protocol_list_t) + sizeof(protocol_t *));
4572 plist->count = 1;
4573 plist->list[0] = (protocol_ref_t)protocol;
4574
4575 unsigned int count = 0;
4576 for (plistp = cls->data->protocols; plistp && *plistp; plistp++) {
4577 count++;
4578 }
4579
4580 cls->data->protocols =
4581 _realloc_internal(cls->data->protocols,
4582 (count+2) * sizeof(protocol_list_t *));
4583 cls->data->protocols[count] = plist;
4584 cls->data->protocols[count+1] = NULL;
4585
4586 // fixme metaclass?
4587
4588 rwlock_unlock_write(&runtimeLock);
4589
4590 return YES;
4591 }
4592
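/***********************************************************************
* Editorial usage sketch for class_addProtocol (not part of the
* original source; the class and protocol are assumptions). Conformance
* becomes visible through class_conformsToProtocol afterwards.
*
*   Protocol *p = objc_getProtocol("MyMarkerProtocol");
*   if (p && class_addProtocol(cls, p)) {
*       assert(class_conformsToProtocol(cls, p));
*   }
**********************************************************************/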
4593
4594 /***********************************************************************
4595 * look_up_class
4596 * Look up a class by name, and realize it.
4597 * Locking: acquires runtimeLock
4598 **********************************************************************/
4599 __private_extern__ id
4600 look_up_class(const char *name,
4601 BOOL includeUnconnected __attribute__((unused)),
4602 BOOL includeClassHandler __attribute__((unused)))
4603 {
4604 if (!name) return nil;
4605
4606 rwlock_read(&runtimeLock);
4607 class_t *result = getClass(name);
4608 BOOL unrealized = result && !isRealized(result);
4609 rwlock_unlock_read(&runtimeLock);
4610 if (unrealized) {
4611 rwlock_write(&runtimeLock);
4612 realizeClass(result);
4613 rwlock_unlock_write(&runtimeLock);
4614 }
4615 return (id)result;
4616 }
4617
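/***********************************************************************
* Editorial note: this is the primitive behind the public by-name
* lookups; client code normally reaches it through objc_getClass or
* objc_lookUpClass rather than calling it directly, e.g.
*
*   Class c = objc_getClass("NSObject");   // realized before returning
**********************************************************************/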
4618
4619 /***********************************************************************
4620 * objc_duplicateClass
4621 * Creates an independent copy of a class under a new name.
4622 * Locking: acquires runtimeLock
4623 **********************************************************************/
4624 Class
4625 objc_duplicateClass(Class original_gen, const char *name,
4626 size_t extraBytes)
4627 {
4628 struct class_t *original = newcls(original_gen);
4629 struct class_t *duplicate;
4630
4631 rwlock_write(&runtimeLock);
4632
4633 assert(isRealized(original));
4634 assert(!isMetaClass(original));
4635
4636 duplicate = (struct class_t *)
4637 _calloc_class(instanceSize(original->isa) + extraBytes);
4638 if (instanceSize(original->isa) < sizeof(class_t)) {
4639 _objc_inform("busted! %s\n", original->data->ro->name);
4640 }
4641
4642
4643 duplicate->isa = original->isa;
4644 duplicate->superclass = original->superclass;
4645 duplicate->cache = (Cache)&_objc_empty_cache;
4646 duplicate->vtable = _objc_empty_vtable;
4647
4648 duplicate->data = _calloc_internal(sizeof(*original->data), 1);
4649 duplicate->data->flags = (original->data->flags | RW_COPIED_RO) & ~RW_SPECIALIZED_VTABLE;
4650 duplicate->data->version = original->data->version;
4651 duplicate->data->firstSubclass = NULL;
4652 duplicate->data->nextSiblingClass = NULL;
4653
4654 duplicate->data->ro =
4655 _memdup_internal(original->data->ro, sizeof(*original->data->ro));
4656 *(char **)&duplicate->data->ro->name = _strdup_internal(name);
4657
4658 if (original->data->methods) {
4659 duplicate->data->methods =
4660 _memdup_internal(original->data->methods,
4661 malloc_size(original->data->methods));
4662 method_list_t **mlistp = duplicate->data->methods;
4663 for (mlistp = duplicate->data->methods; *mlistp; mlistp++) {
4664 *mlistp = _memdup_internal(*mlistp, method_list_size(*mlistp));
4665 }
4666 }
4667
4668 // fixme dies when categories are added to the base
4669 duplicate->data->properties = original->data->properties;
4670 duplicate->data->protocols = original->data->protocols;
4671
4672 if (duplicate->superclass) {
4673 addSubclass(duplicate->superclass, duplicate);
4674 }
4675
4676 // Don't methodize class - construction above is correct
4677
4678 addNamedClass(duplicate, duplicate->data->ro->name);
4679 addRealizedClass(duplicate);
4680 // no: duplicate->isa == original->isa
4681 // addRealizedMetaclass(duplicate->isa);
4682
4683 if (PrintConnecting) {
4684 _objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p",
4685 name, original->data->ro->name,
4686 duplicate, duplicate->data->ro);
4687 }
4688
4689 rwlock_unlock_write(&runtimeLock);
4690
4691 return (Class)duplicate;
4692 }
4693
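/***********************************************************************
* Editorial usage sketch for objc_duplicateClass (not part of the
* original source; the names are assumptions). The duplicate keeps the
* original's metaclass and superclass but gets its own copy of the
* method lists, per the code above.
*
*   Class orig = objc_getClass("NSObject");
*   Class dup  = objc_duplicateClass(orig, "NSObject_dup", 0);
*   assert(class_getSuperclass(dup) == class_getSuperclass(orig));
**********************************************************************/
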
4694 /***********************************************************************
4695 * objc_initializeClassPair_internal
4696 * Locking: runtimeLock must be write-locked by the caller
4697 **********************************************************************/
4698 static void objc_initializeClassPair_internal(Class superclass_gen, const char *name, Class cls_gen, Class meta_gen)
4699 {
4700 rwlock_assert_writing(&runtimeLock);
4701
4702 class_t *superclass = newcls(superclass_gen);
4703 class_t *cls = newcls(cls_gen);
4704 class_t *meta = newcls(meta_gen);
4705 class_ro_t *cls_ro_w, *meta_ro_w;
4706
4707 cls->data = _calloc_internal(sizeof(class_rw_t), 1);
4708 meta->data = _calloc_internal(sizeof(class_rw_t), 1);
4709 cls_ro_w = _calloc_internal(sizeof(class_ro_t), 1);
4710 meta_ro_w = _calloc_internal(sizeof(class_ro_t), 1);
4711 cls->data->ro = cls_ro_w;
4712 meta->data->ro = meta_ro_w;
4713
4714 // Set basic info
4715 cls->cache = (Cache)&_objc_empty_cache;
4716 meta->cache = (Cache)&_objc_empty_cache;
4717 cls->vtable = _objc_empty_vtable;
4718 meta->vtable = _objc_empty_vtable;
4719
4720 cls->data->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED;
4721 meta->data->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED;
4722 cls->data->version = 0;
4723 meta->data->version = 7;
4724
4725 cls_ro_w->flags = 0;
4726 meta_ro_w->flags = RO_META;
4727 if (!superclass) {
4728 cls_ro_w->flags |= RO_ROOT;
4729 meta_ro_w->flags |= RO_ROOT;
4730 }
4731 if (superclass) {
4732 cls_ro_w->instanceStart = instanceSize(superclass);
4733 meta_ro_w->instanceStart = instanceSize(superclass->isa);
4734 cls_ro_w->instanceSize = cls_ro_w->instanceStart;
4735 meta_ro_w->instanceSize = meta_ro_w->instanceStart;
4736 } else {
4737 cls_ro_w->instanceStart = 0;
4738 meta_ro_w->instanceStart = (uint32_t)sizeof(class_t);
4739 cls_ro_w->instanceSize = (uint32_t)sizeof(id); // just an isa
4740 meta_ro_w->instanceSize = meta_ro_w->instanceStart;
4741 }
4742
4743 cls_ro_w->name = _strdup_internal(name);
4744 meta_ro_w->name = _strdup_internal(name);
4745
4746 // Connect to superclasses and metaclasses
4747 cls->isa = meta;
4748 if (superclass) {
4749 meta->isa = superclass->isa->isa;
4750 cls->superclass = superclass;
4751 meta->superclass = superclass->isa;
4752 addSubclass(superclass, cls);
4753 addSubclass(superclass->isa, meta);
4754 } else {
4755 meta->isa = meta;
4756 cls->superclass = Nil;
4757 meta->superclass = cls;
4758 addSubclass(cls, meta);
4759 }
4760 }
4761
4762 /***********************************************************************
4763 * objc_initializeClassPair
4764 **********************************************************************/
4765 Class objc_initializeClassPair(Class superclass_gen, const char *name, Class cls_gen, Class meta_gen)
4766 {
4767 class_t *superclass = newcls(superclass_gen);
4768
4769 rwlock_write(&runtimeLock);
4770
4771 //
4772 // Common superclass integrity checks with objc_allocateClassPair
4773 //
4774 if (getClass(name)) {
4775 rwlock_unlock_write(&runtimeLock);
4776 return Nil;
4777 }
4778 // fixme reserve class against simultaneous allocation
4779
4780 if (superclass) assert(isRealized(superclass));
4781
4782 if (superclass && superclass->data->flags & RW_CONSTRUCTING) {
4783 // Can't make subclass of an in-construction class
4784 rwlock_unlock_write(&runtimeLock);
4785 return Nil;
4786 }
4787
4788
4789 // just initialize what was supplied
4790 objc_initializeClassPair_internal(superclass_gen, name, cls_gen, meta_gen);
4791
4792 rwlock_unlock_write(&runtimeLock);
4793 return cls_gen;
4794 }
4795
4796 /***********************************************************************
4797 * objc_allocateClassPair
4798 * Allocates a new, not-yet-registered class and metaclass pair.
4799 * Locking: acquires runtimeLock
4800 **********************************************************************/
4801 Class objc_allocateClassPair(Class superclass_gen, const char *name,
4802 size_t extraBytes)
4803 {
4804 class_t *superclass = newcls(superclass_gen);
4805 Class cls, meta;
4806
4807 rwlock_write(&runtimeLock);
4808
4809 //
4810 // Common superclass integrity checks with objc_initializeClassPair
4811 //
4812 if (getClass(name)) {
4813 rwlock_unlock_write(&runtimeLock);
4814 return Nil;
4815 }
4816 // fixme reserve class against simultaneous allocation
4817
4818 if (superclass) assert(isRealized(superclass));
4819
4820 if (superclass && superclass->data->flags & RW_CONSTRUCTING) {
4821 // Can't make subclass of an in-construction class
4822 rwlock_unlock_write(&runtimeLock);
4823 return Nil;
4824 }
4825
4826
4827
4828 // Allocate new classes.
4829 if (superclass) {
4830 cls = _calloc_class(instanceSize(superclass->isa) + extraBytes);
4831 meta = _calloc_class(instanceSize(superclass->isa->isa) + extraBytes);
4832 } else {
4833 cls = _calloc_class(sizeof(class_t) + extraBytes);
4834 meta = _calloc_class(sizeof(class_t) + extraBytes);
4835 }
4836
4837
4838 objc_initializeClassPair_internal(superclass_gen, name, cls, meta);
4839
4840 rwlock_unlock_write(&runtimeLock);
4841
4842 return (Class)cls;
4843 }
4844
4845
4846 /***********************************************************************
4847 * objc_registerClassPair
4848 * Finishes and registers a class pair created by objc_allocateClassPair.
4849 * Locking: acquires runtimeLock
4850 **********************************************************************/
4851 void objc_registerClassPair(Class cls_gen)
4852 {
4853 class_t *cls = newcls(cls_gen);
4854
4855 rwlock_write(&runtimeLock);
4856
4857 if ((cls->data->flags & RW_CONSTRUCTED) ||
4858 (cls->isa->data->flags & RW_CONSTRUCTED))
4859 {
4860 _objc_inform("objc_registerClassPair: class '%s' was already "
4861 "registered!", cls->data->ro->name);
4862 rwlock_unlock_write(&runtimeLock);
4863 return;
4864 }
4865
4866 if (!(cls->data->flags & RW_CONSTRUCTING) ||
4867 !(cls->isa->data->flags & RW_CONSTRUCTING))
4868 {
4869 _objc_inform("objc_registerClassPair: class '%s' was not "
4870 "allocated with objc_allocateClassPair!",
4871 cls->data->ro->name);
4872 rwlock_unlock_write(&runtimeLock);
4873 return;
4874 }
4875
4876 // Build ivar layouts
4877 if (UseGC) {
4878 struct class_t *supercls = getSuperclass(cls);
4879 class_ro_t *ro_w = (class_ro_t *)cls->data->ro;
4880
4881 if (ro_w->ivarLayout) {
4882 // Class builder already called class_setIvarLayout.
4883 }
4884 else if (!supercls) {
4885 // Root class. Scan conservatively (should be isa ivar only).
4886 // ivar_layout is already NULL.
4887 }
4888 else if (ro_w->ivars == NULL) {
4889 // No local ivars. Use superclass's layouts.
4890 ro_w->ivarLayout = (unsigned char *)
4891 _strdup_internal((char *)supercls->data->ro->ivarLayout);
4892 }
4893 else {
4894 // Has local ivars. Build layouts based on superclass.
4895 layout_bitmap bitmap =
4896 layout_bitmap_create(supercls->data->ro->ivarLayout,
4897 instanceSize(supercls),
4898 instanceSize(cls), NO);
4899 uint32_t i;
4900 for (i = 0; i < ro_w->ivars->count; i++) {
4901 ivar_t *ivar = ivar_list_nth(ro_w->ivars, i);
4902 if (!ivar->offset) continue; // anonymous bitfield
4903
4904 layout_bitmap_set_ivar(bitmap, ivar->type, *ivar->offset);
4905 }
4906 ro_w->ivarLayout = layout_string_create(bitmap);
4907 layout_bitmap_free(bitmap);
4908 }
4909
4910 if (ro_w->weakIvarLayout) {
4911 // Class builder already called class_setWeakIvarLayout.
4912 }
4913 else if (!supercls) {
4914 // Root class. No weak ivars (should be isa ivar only).
4915 // weak_ivar_layout is already NULL.
4916 }
4917 else if (ro_w->ivars == NULL) {
4918 // No local ivars. Use superclass's layout.
4919 ro_w->weakIvarLayout = (unsigned char *)
4920 _strdup_internal((char *)supercls->data->ro->weakIvarLayout);
4921 }
4922 else {
4923 // Has local ivars. Build layout based on superclass.
4924 // No way to add weak ivars yet.
4925 ro_w->weakIvarLayout = (unsigned char *)
4926 _strdup_internal((char *)supercls->data->ro->weakIvarLayout);
4927 }
4928 }
4929
4930 // Clear "under construction" bit, set "done constructing" bit
4931 cls->data->flags &= ~RW_CONSTRUCTING;
4932 cls->isa->data->flags &= ~RW_CONSTRUCTING;
4933 cls->data->flags |= RW_CONSTRUCTED;
4934 cls->isa->data->flags |= RW_CONSTRUCTED;
4935
4936 // Add to realized and uninitialized classes
4937 addNamedClass(cls, cls->data->ro->name);
4938 addRealizedClass(cls);
4939 addRealizedMetaclass(cls->isa);
4940 addUninitializedClass(cls, cls->isa);
4941
4942 rwlock_unlock_write(&runtimeLock);
4943 }
4944
4945
4946 static void unload_class(class_t *cls, BOOL isMeta)
4947 {
4948 // Detach class from various lists
4949
4950 // categories not yet attached to this class
4951 category_list *cats;
4952 cats = unattachedCategoriesForClass(cls);
4953 if (cats) free(cats);
4954
4955 // class tables and +load queue
4956 if (!isMeta) {
4957 removeNamedClass(cls, getName(cls));
4958 removeRealizedClass(cls);
4959 removeUninitializedClass(cls);
4960 } else {
4961 removeRealizedMetaclass(cls);
4962 }
4963
4964 // superclass's subclass list
4965 if (isRealized(cls)) {
4966 class_t *supercls = getSuperclass(cls);
4967 if (supercls) removeSubclass(supercls, cls);
4968 }
4969
4970
4971 // Dispose the class's own data structures
4972
4973 if (isRealized(cls)) {
4974 uint32_t i;
4975
4976 // Dereferences the cache contents; do this before freeing methods
4977 if (cls->cache != (Cache)&_objc_empty_cache) _cache_free(cls->cache);
4978
4979 if (cls->data->methods) {
4980 method_list_t **mlistp;
4981 for (mlistp = cls->data->methods; *mlistp; mlistp++) {
4982 for (i = 0; i < (**mlistp).count; i++) {
4983 method_t *m = method_list_nth(*mlistp, i);
4984 try_free(m->types);
4985 }
4986 try_free(*mlistp);
4987 }
4988 try_free(cls->data->methods);
4989 }
4990
4991 const ivar_list_t *ilist = cls->data->ro->ivars;
4992 if (ilist) {
4993 for (i = 0; i < ilist->count; i++) {
4994 const ivar_t *ivar = ivar_list_nth(ilist, i);
4995 try_free(ivar->offset);
4996 try_free(ivar->name);
4997 try_free(ivar->type);
4998 }
4999 try_free(ilist);
5000 }
5001
5002 protocol_list_t **plistp = cls->data->protocols;
5003 for (plistp = cls->data->protocols; plistp && *plistp; plistp++) {
5004 try_free(*plistp);
5005 }
5006 try_free(cls->data->protocols);
5007
5008 // fixme:
5009 // properties
5010
5011 if (cls->vtable != _objc_empty_vtable &&
5012 cls->data->flags & RW_SPECIALIZED_VTABLE) try_free(cls->vtable);
5013 try_free(cls->data->ro->ivarLayout);
5014 try_free(cls->data->ro->weakIvarLayout);
5015 try_free(cls->data->ro->name);
5016 try_free(cls->data->ro);
5017 try_free(cls->data);
5018 try_free(cls);
5019 }
5020 }
5021
5022 void objc_disposeClassPair(Class cls_gen)
5023 {
5024 class_t *cls = newcls(cls_gen);
5025
5026 rwlock_write(&runtimeLock);
5027
5028 if (!(cls->data->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)) ||
5029 !(cls->isa->data->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)))
5030 {
5031 // class not allocated with objc_allocateClassPair
5032 // disposing still-unregistered class is OK!
5033 _objc_inform("objc_disposeClassPair: class '%s' was not "
5034 "allocated with objc_allocateClassPair!",
5035 cls->data->ro->name);
5036 rwlock_unlock_write(&runtimeLock);
5037 return;
5038 }
5039
5040 if (isMetaClass(cls)) {
5041 _objc_inform("objc_disposeClassPair: class '%s' is a metaclass, "
5042 "not a class!", cls->data->ro->name);
5043 rwlock_unlock_write(&runtimeLock);
5044 return;
5045 }
5046
5047 // Shouldn't have any live subclasses.
5048 if (cls->data->firstSubclass) {
5049 _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
5050 "including '%s'!", cls->data->ro->name,
5051 getName(cls->data->firstSubclass));
5052 }
5053 if (cls->isa->data->firstSubclass) {
5054 _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
5055 "including '%s'!", cls->data->ro->name,
5056 getName(cls->isa->data->firstSubclass));
5057 }
5058
5059 // don't remove_class_from_loadable_list()
5060 // - it's not there and we don't have the lock
5061 unload_class(cls->isa, YES);
5062 unload_class(cls, NO);
5063
5064 rwlock_unlock_write(&runtimeLock);
5065 }
5066
5067
5068
5069 /***********************************************************************
5070 * class_createInstanceFromZone
5071 * fixme
5072 * Locking: none
5073 **********************************************************************/
5074 id
5075 class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
5076 {
5077 if (cls) assert(isRealized(newcls(cls)));
5078 return _internal_class_createInstanceFromZone(cls, extraBytes, zone);
5079 }
5080
5081
5082 /***********************************************************************
5083 * class_createInstance
5084 * fixme
5085 * Locking: none
5086 **********************************************************************/
5087 id
5088 class_createInstance(Class cls, size_t extraBytes)
5089 {
5090 return class_createInstanceFromZone(cls, extraBytes, NULL);
5091 }
5092
5093
5094 /***********************************************************************
5095 * object_copyFromZone
5096 * fixme
5097 * Locking: none
5098 **********************************************************************/
5099 id
5100 object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
5101 {
5102 id obj;
5103 size_t size;
5104
5105 if (!oldObj) return nil;
5106
5107 size = _class_getInstanceSize(oldObj->isa) + extraBytes;
5108 #if !defined(NO_GC)
5109 if (UseGC) {
5110 obj = (id) auto_zone_allocate_object(gc_zone, size,
5111 AUTO_OBJECT_SCANNED, 0, 1);
5112 } else
5113 #endif
5114 if (zone) {
5115 obj = malloc_zone_calloc(zone, size, 1);
5116 } else {
5117 obj = (id) calloc(1, size);
5118 }
5119 if (!obj) return nil;
5120
5121 // fixme this doesn't handle C++ ivars correctly (#4619414)
5122 objc_memmove_collectable(obj, oldObj, size);
5123
5124 #if !defined(NO_GC)
5125 if (UseGC) gc_fixup_weakreferences(obj, oldObj);
5126 #endif
5127
5128 return obj;
5129 }
5130
5131
5132 /***********************************************************************
5133 * object_copy
5134 * fixme
5135 * Locking: none
5136 **********************************************************************/
5137 id
5138 object_copy(id oldObj, size_t extraBytes)
5139 {
5140 return object_copyFromZone(oldObj, extraBytes, malloc_default_zone());
5141 }
5142
5143
5144 /***********************************************************************
5145 * object_dispose
5146 * fixme
5147 * Locking: none
5148 **********************************************************************/
5149 id
5150 object_dispose(id obj)
5151 {
5152 return _internal_object_dispose(obj);
5153 }
5154
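/***********************************************************************
* Editorial usage sketch for the instance functions above (not part of
* the original source). extraBytes grows the allocation past the
* declared ivars; object_dispose frees what class_createInstance and
* object_copy produced.
*
*   id obj = class_createInstance(objc_getClass("NSObject"), 0);
*   id dup = object_copy(obj, 0);
*   object_dispose(dup);
*   object_dispose(obj);
**********************************************************************/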
5155
5156 /***********************************************************************
5157 * _objc_getFreedObjectClass
5158 * fixme
5159 * Locking: none
5160 **********************************************************************/
5161 Class _objc_getFreedObjectClass (void)
5162 {
5163 return nil;
5164 }
5165
5166 #ifndef NO_FIXUP
5167
5168 extern id objc_msgSend_fixup(id, SEL, ...);
5169 extern id objc_msgSend_fixedup(id, SEL, ...);
5170 extern id objc_msgSendSuper2_fixup(id, SEL, ...);
5171 extern id objc_msgSendSuper2_fixedup(id, SEL, ...);
5172 extern id objc_msgSend_stret_fixup(id, SEL, ...);
5173 extern id objc_msgSend_stret_fixedup(id, SEL, ...);
5174 extern id objc_msgSendSuper2_stret_fixup(id, SEL, ...);
5175 extern id objc_msgSendSuper2_stret_fixedup(id, SEL, ...);
5176 #if defined(__i386__) || defined(__x86_64__)
5177 extern id objc_msgSend_fpret_fixup(id, SEL, ...);
5178 extern id objc_msgSend_fpret_fixedup(id, SEL, ...);
5179 #endif
5180 #if defined(__x86_64__)
5181 extern id objc_msgSend_fp2ret_fixup(id, SEL, ...);
5182 extern id objc_msgSend_fp2ret_fixedup(id, SEL, ...);
5183 #endif
5184
5185 /***********************************************************************
5186 * _objc_fixupMessageRef
5187 * Fixes up message ref *msg.
5188 * obj is the receiver. supr is NULL for non-super messages
5189 * Locking: acquires runtimeLock
5190 **********************************************************************/
5191 __private_extern__ IMP
5192 _objc_fixupMessageRef(id obj, struct objc_super2 *supr, message_ref *msg)
5193 {
5194 IMP imp;
5195 class_t *isa;
5196
5197 rwlock_assert_unlocked(&runtimeLock);
5198
5199 if (!supr) {
5200 // normal message - search obj->isa for the method implementation
5201 isa = (class_t *)obj->isa;
5202
5203 if (!isRealized(isa)) {
5204 // obj is a class object, isa is its metaclass
5205 class_t *cls;
5206 rwlock_write(&runtimeLock);
5207 cls = realizeClass((class_t *)obj);
5208 rwlock_unlock_write(&runtimeLock);
5209
5210 // shouldn't have instances of unrealized classes!
5211 assert(isMetaClass(isa));
5212 // shouldn't be relocating classes here!
5213 assert(cls == (class_t *)obj);
5214 }
5215 }
5216 else {
5217 // this is objc_msgSend_super, and supr->current_class->superclass
5218 // is the class to search for the method implementation
5219 assert(isRealized((class_t *)supr->current_class));
5220 isa = getSuperclass((class_t *)supr->current_class);
5221 }
5222
5223 msg->sel = sel_registerName((const char *)msg->sel);
5224
5225 #ifndef NO_VTABLE
5226 int vtableIndex;
5227 if (msg->imp == (IMP)&objc_msgSend_fixup &&
5228 (vtableIndex = vtable_getIndex(msg->sel)) >= 0)
5229 {
5230 // vtable dispatch
5231 msg->imp = vtableTrampolines[vtableIndex];
5232 imp = isa->vtable[vtableIndex];
5233 }
5234 else
5235 #endif
5236 {
5237 // ordinary dispatch
5238 imp = lookUpMethod((Class)isa, msg->sel, YES/*initialize*/, YES/*cache*/);
5239
5240 if (msg->imp == (IMP)&objc_msgSend_fixup) {
5241 msg->imp = (IMP)&objc_msgSend_fixedup;
5242 }
5243 else if (msg->imp == (IMP)&objc_msgSendSuper2_fixup) {
5244 msg->imp = (IMP)&objc_msgSendSuper2_fixedup;
5245 }
5246 else if (msg->imp == (IMP)&objc_msgSend_stret_fixup) {
5247 msg->imp = (IMP)&objc_msgSend_stret_fixedup;
5248 }
5249 else if (msg->imp == (IMP)&objc_msgSendSuper2_stret_fixup) {
5250 msg->imp = (IMP)&objc_msgSendSuper2_stret_fixedup;
5251 }
5252 #if defined(__i386__) || defined(__x86_64__)
5253 else if (msg->imp == (IMP)&objc_msgSend_fpret_fixup) {
5254 msg->imp = (IMP)&objc_msgSend_fpret_fixedup;
5255 }
5256 #endif
5257 #if defined(__x86_64__)
5258 else if (msg->imp == (IMP)&objc_msgSend_fp2ret_fixup) {
5259 msg->imp = (IMP)&objc_msgSend_fp2ret_fixedup;
5260 }
5261 #endif
5262 else {
5263 // The ref may already have been fixed up, either by another thread
5264 // or by +initialize via lookUpMethod above.
5265 }
5266 }
5267
5268 return imp;
5269 }
5270
5271 // ! NO_FIXUP
5272 #endif
5273
5274
5275 #warning fixme delete after #4586306
5276 Class class_poseAs(Class imposter, Class original)
5277 {
5278 _objc_fatal("Don't call class_poseAs.");
5279 }
5280
5281
5282 // ProKit SPI
5283 static class_t *setSuperclass(class_t *cls, class_t *newSuper)
5284 {
5285 class_t *oldSuper;
5286
5287 rwlock_assert_writing(&runtimeLock);
5288
5289 oldSuper = cls->superclass;
5290 removeSubclass(oldSuper, cls);
5291 removeSubclass(oldSuper->isa, cls->isa);
5292
5293 cls->superclass = newSuper;
5294 cls->isa->superclass = newSuper->isa;
5295 addSubclass(newSuper, cls);
5296 addSubclass(newSuper->isa, cls->isa);
5297
5298 flushCaches(cls);
5299 flushCaches(cls->isa);
5300 flushVtables(cls);
5301 flushVtables(cls->isa);
5302
5303 return oldSuper;
5304 }
5305
5306
5307 Class class_setSuperclass(Class cls_gen, Class newSuper_gen)
5308 {
5309 class_t *cls = newcls(cls_gen);
5310 class_t *newSuper = newcls(newSuper_gen);
5311 class_t *oldSuper;
5312
5313 rwlock_write(&runtimeLock);
5314 oldSuper = setSuperclass(cls, newSuper);
5315 rwlock_unlock_write(&runtimeLock);
5316
5317 return (Class)oldSuper;
5318 }
5319
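/***********************************************************************
* Editorial usage sketch for class_setSuperclass (not part of the
* original source; the classes are assumptions, and rewiring a live
* hierarchy is rarely safe). setSuperclass above flushes the caches and
* vtables of the moved class and its metaclass.
*
*   Class oldSuper = class_setSuperclass(myCls, newBase);
*   assert(class_getSuperclass(myCls) == newBase);
**********************************************************************/
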
5320 #endif