]> git.saurik.com Git - apple/objc4.git/blob - runtime/objc-runtime-new.mm
objc4-532.2.tar.gz
[apple/objc4.git] / runtime / objc-runtime-new.mm
1 /*
2 * Copyright (c) 2005-2009 Apple Inc. All Rights Reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24 /***********************************************************************
25 * objc-runtime-new.m
26 * Support for new-ABI classes and images.
27 **********************************************************************/
28
29 #if __OBJC2__
30
31 #include "objc-private.h"
32 #include "objc-runtime-new.h"
33 #include "objc-file.h"
34 #include <objc/message.h>
35 #include <mach/shared_region.h>
36
// Cast helpers between the public opaque runtime types (Class, Method,
// Ivar, Category, Protocol, objc_property_t) and the new-ABI internal
// struct types used throughout this file.
#define newcls(cls) ((class_t *)cls)
#define newmethod(meth) ((method_t *)meth)
#define newivar(ivar) ((ivar_t *)ivar)
#define newcategory(cat) ((category_t *)cat)
#define newprotocol(p) ((protocol_t *)p)
#define newproperty(p) ((property_t *)p)
43
44 static const char *getName(class_t *cls);
45 static uint32_t unalignedInstanceSize(class_t *cls);
46 static uint32_t alignedInstanceSize(class_t *cls);
47 static BOOL isMetaClass(class_t *cls);
48 static class_t *getSuperclass(class_t *cls);
49 static void detach_class(class_t *cls, BOOL isMeta);
50 static void free_class(class_t *cls);
51 static class_t *setSuperclass(class_t *cls, class_t *newSuper);
52 static class_t *realizeClass(class_t *cls);
53 static void flushCaches(class_t *cls);
54 static void flushVtables(class_t *cls);
55 static method_t *getMethodNoSuper_nolock(class_t *cls, SEL sel);
56 static method_t *getMethod_nolock(class_t *cls, SEL sel);
57 static void changeInfo(class_t *cls, unsigned int set, unsigned int clear);
58 static IMP _method_getImplementation(method_t *m);
59 static BOOL hasCxxStructors(class_t *cls);
60 static IMP addMethod(class_t *cls, SEL name, IMP imp, const char *types, BOOL replace);
61 static NXHashTable *realizedClasses(void);
62 static bool isRRSelector(SEL sel);
63 static bool isAWZSelector(SEL sel);
64 static void updateCustomRR_AWZ(class_t *cls, method_t *meth);
65 static method_t *search_method_list(const method_list_t *mlist, SEL sel);
66
// Do-nothing IMP: returns the receiver unchanged. _cmd is unused.
id objc_noop_imp(id self, SEL _cmd __unused)
{
    return self;
}
70
71 /***********************************************************************
72 * Lock management
73 * Every lock used anywhere must be managed here.
74 * Locks not managed here may cause gdb deadlocks.
75 **********************************************************************/
// The runtime's core locks. Per the comment above, every lock must be
// registered with the debugger-mode machinery below.
rwlock_t runtimeLock;      // guards class/category data structures
                           // (see rwlock_assert_writing() uses below)
rwlock_t selLock;          // presumably guards selector registration —
                           // confirm at sel_* call sites
mutex_t cacheUpdateLock = MUTEX_INITIALIZER;  // guards method-cache updates
recursive_mutex_t loadMethodLock = RECURSIVE_MUTEX_INITIALIZER; // guards +load
// Per-lock state recorded by startDebuggerMode():
// 0 = not held by debugger mode, RDONLY = read-locked, RDWR = write-locked.
static int debugger_runtimeLock;
static int debugger_selLock;
static int debugger_cacheUpdateLock;
static int debugger_loadMethodLock;
#define RDONLY 1
#define RDWR 2
86
// One-time runtime lock initialization. The rwlocks need explicit init;
// cacheUpdateLock relies solely on its static MUTEX_INITIALIZER above.
void lock_init(void)
{
    rwlock_init(&runtimeLock);
    rwlock_init(&selLock);
    recursive_mutex_init(&loadMethodLock);
}
93
94
95 /***********************************************************************
96 * startDebuggerMode
97 * Attempt to acquire some locks for debugger mode.
98 * Returns 0 if debugger mode failed because too many locks are unavailable.
99 *
100 * Locks successfully acquired are held until endDebuggerMode().
101 * Locks not acquired are off-limits until endDebuggerMode(); any
102 * attempt to manipulate them will cause a trap.
103 * Locks not handled here may cause deadlocks in gdb.
104 **********************************************************************/
int startDebuggerMode(void)
{
    int result = DEBUGGER_FULL;

    // runtimeLock is required (can't do much without it).
    // A read-only grab still permits (partial) debugger mode.
    if (rwlock_try_write(&runtimeLock)) {
        debugger_runtimeLock = RDWR;
    } else if (rwlock_try_read(&runtimeLock)) {
        debugger_runtimeLock = RDONLY;
        result = DEBUGGER_PARTIAL;
    } else {
        return DEBUGGER_OFF;
    }

    // cacheUpdateLock is required (must not fail a necessary cache flush)
    // must be AFTER runtimeLock to avoid lock inversion
    if (mutex_try_lock(&cacheUpdateLock)) {
        debugger_cacheUpdateLock = RDWR;
    } else {
        // Back out the runtimeLock acquired above before giving up.
        rwlock_unlock(&runtimeLock, debugger_runtimeLock);
        debugger_runtimeLock = 0;
        return DEBUGGER_OFF;
    }

    // side table locks are not optional
    if (!noSideTableLocksHeld()) {
        // Release everything acquired so far.
        rwlock_unlock(&runtimeLock, debugger_runtimeLock);
        mutex_unlock(&cacheUpdateLock);
        debugger_runtimeLock = 0;
        return DEBUGGER_OFF;
    }

    // selLock is optional; failure merely degrades to partial mode.
    if (rwlock_try_write(&selLock)) {
        debugger_selLock = RDWR;
    } else if (rwlock_try_read(&selLock)) {
        debugger_selLock = RDONLY;
        result = DEBUGGER_PARTIAL;
    } else {
        debugger_selLock = 0;
        result = DEBUGGER_PARTIAL;
    }

    // loadMethodLock is optional
    if (recursive_mutex_try_lock(&loadMethodLock)) {
        debugger_loadMethodLock = RDWR;
    } else {
        debugger_loadMethodLock = 0;
        result = DEBUGGER_PARTIAL;
    }

    return result;
}
158
159 /***********************************************************************
160 * endDebuggerMode
161 * Relinquish locks acquired in startDebuggerMode().
162 **********************************************************************/
void endDebuggerMode(void)
{
    // Only valid after a successful startDebuggerMode():
    // runtimeLock is always held in some mode at this point.
    assert(debugger_runtimeLock != 0);

    rwlock_unlock(&runtimeLock, debugger_runtimeLock);
    debugger_runtimeLock = 0;

    // selLock may not have been acquired (debugger_selLock may be 0).
    // NOTE(review): presumably rwlock_unlock treats mode 0 as a no-op —
    // confirm against the rwlock implementation.
    rwlock_unlock(&selLock, debugger_selLock);
    debugger_selLock = 0;

    // cacheUpdateLock acquisition is mandatory in startDebuggerMode().
    assert(debugger_cacheUpdateLock == RDWR);
    mutex_unlock(&cacheUpdateLock);
    debugger_cacheUpdateLock = 0;

    // loadMethodLock is optional; release it only if it was taken.
    if (debugger_loadMethodLock) {
        recursive_mutex_unlock(&loadMethodLock);
        debugger_loadMethodLock = 0;
    }
}
182
183 /***********************************************************************
184 * isManagedDuringDebugger
185 * Returns YES if the given lock is handled specially during debugger
186 * mode (i.e. debugger mode tries to acquire it).
187 **********************************************************************/
BOOL isManagedDuringDebugger(void *lock)
{
    // Exactly the four locks that startDebuggerMode() attempts to take.
    return (lock == &selLock         ||
            lock == &cacheUpdateLock ||
            lock == &runtimeLock     ||
            lock == &loadMethodLock) ? YES : NO;
}
196
197 /***********************************************************************
198 * isLockedDuringDebugger
199 * Returns YES if the given mutex was acquired by debugger mode.
200 * Locking a managed mutex during debugger mode causes a trap unless
201 * this returns YES.
202 **********************************************************************/
BOOL isLockedDuringDebugger(void *lock)
{
    assert(DebuggerMode);

    // cacheUpdateLock is always held in debugger mode; loadMethodLock is
    // compared through a mutex_t cast because it is a recursive mutex.
    if (lock == &cacheUpdateLock || lock == (mutex_t *)&loadMethodLock) {
        return YES;
    }
    return NO;
}
211
212 /***********************************************************************
213 * isReadingDuringDebugger
214 * Returns YES if the given rwlock was read-locked by debugger mode.
215 * Read-locking a managed rwlock during debugger mode causes a trap unless
216 * this returns YES.
217 **********************************************************************/
BOOL isReadingDuringDebugger(rwlock_t *lock)
{
    assert(DebuggerMode);

    // Read access is allowed even if debugger mode actually
    // write-locked the rwlock (any nonzero mode suffices).
    if (lock == &runtimeLock) return debugger_runtimeLock ? YES : NO;
    if (lock == &selLock)     return debugger_selLock     ? YES : NO;

    return NO;
}
228
229 /***********************************************************************
230 * isWritingDuringDebugger
231 * Returns YES if the given rwlock was write-locked by debugger mode.
232 * Write-locking a managed rwlock during debugger mode causes a trap unless
233 * this returns YES.
234 **********************************************************************/
BOOL isWritingDuringDebugger(rwlock_t *lock)
{
    assert(DebuggerMode);

    // Write access requires the full RDWR grab, not a read-only one.
    if (lock == &runtimeLock) return (debugger_runtimeLock == RDWR) ? YES : NO;
    if (lock == &selLock)     return (debugger_selLock == RDWR)     ? YES : NO;

    return NO;
}
244
245
246 /***********************************************************************
247 * vtable dispatch
248 *
249 * Every class gets a vtable pointer. The vtable is an array of IMPs.
250 * The selectors represented in the vtable are the same for all classes
251 * (i.e. no class has a bigger or smaller vtable).
252 * Each vtable index has an associated trampoline which dispatches to
253 * the IMP at that index for the receiver class's vtable (after
254 * checking for NULL). Dispatch fixup uses these trampolines instead
255 * of objc_msgSend.
256 * Fragility: The vtable size and list of selectors is chosen at launch
257 * time. No compiler-generated code depends on any particular vtable
258 * configuration, or even the use of vtable dispatch at all.
259 * Memory size: If a class's vtable is identical to its superclass's
260 * (i.e. the class overrides none of the vtable selectors), then
261 * the class points directly to its superclass's vtable. This means
262 * selectors to be included in the vtable should be chosen so they are
263 * (1) frequently called, but (2) not too frequently overridden. In
264 * particular, -dealloc is a bad choice.
265 * Forwarding: If a class doesn't implement some vtable selector, that
266 * selector's IMP is set to objc_msgSend in that class's vtable.
267 * +initialize: Each class keeps the default vtable (which always
268 * redirects to objc_msgSend) until its +initialize is completed.
269 * Otherwise, the first message to a class could be a vtable dispatch,
270 * and the vtable trampoline doesn't include +initialize checking.
271 * Changes: Categories, addMethod, and setImplementation all force vtable
272 * reconstruction for the class and all of its subclasses, if the
273 * vtable selectors are affected.
274 **********************************************************************/
275
276 /***********************************************************************
277 * ABI WARNING ABI WARNING ABI WARNING ABI WARNING ABI WARNING
278 * vtable_prototype on x86_64 steals %rax and does not clear %rdx on return
279 * This means vtable dispatch must never be used for vararg calls
280 * or very large return values.
281 * ABI WARNING ABI WARNING ABI WARNING ABI WARNING ABI WARNING
282 **********************************************************************/
283
// Token repeaters: emit a token sequence 8 / 64 / 128 times.
// Used below to generate the 128-entry empty vtable.
#define X8(x) \
    x x x x x x x x
#define X64(x) \
    X8(x) X8(x) X8(x) X8(x) X8(x) X8(x) X8(x) X8(x)
#define X128(x) \
    X64(x) X64(x)

// Maximum number of vtable slots.
#define vtableMax 128

// hack to avoid conflicts with compiler's internal declaration
// __objc_empty_vtable: 128 slots, each holding _objc_msgSend, so a class
// using the empty vtable always falls back to ordinary dispatch.
asm("\n .data"
    "\n .globl __objc_empty_vtable "
    "\n __objc_empty_vtable:"
#if __LP64__
    X128("\n .quad _objc_msgSend")
#else
    X128("\n .long _objc_msgSend")
#endif
    );
303
304 #if SUPPORT_VTABLE
305
306 // Trampoline descriptors for gdb.
307
308 objc_trampoline_header *gdb_objc_trampolines = NULL;
309
void gdb_objc_trampolines_changed(objc_trampoline_header *thdr) __attribute__((noinline));
// Hook invoked whenever the trampoline list changes. noinline so the
// call remains visible — presumably so a debugger can breakpoint it
// (see "Trampoline descriptors for gdb" above); confirm with gdb docs.
void gdb_objc_trampolines_changed(objc_trampoline_header *thdr)
{
    rwlock_assert_writing(&runtimeLock);
    assert(thdr == gdb_objc_trampolines);

    if (!PrintVtables) return;
    _objc_inform("VTABLES: gdb_objc_trampolines_changed(%p)", thdr);
}
320
// fixme workaround for rdar://6667753
static void appendTrampolines(objc_trampoline_header *thdr) __attribute__((noinline));

// Pushes a trampoline descriptor header onto the front of the
// gdb_objc_trampolines list and notifies the debugger hook.
// thdr->next must be NULL on entry (it is linked in here).
static void appendTrampolines(objc_trampoline_header *thdr)
{
    rwlock_assert_writing(&runtimeLock);
    assert(thdr->next == NULL);

    // Link in front of the existing list. Given the assert above,
    // this test is equivalent to `if (gdb_objc_trampolines != NULL)`.
    if (gdb_objc_trampolines != thdr->next) {
        thdr->next = gdb_objc_trampolines;
    }
    gdb_objc_trampolines = thdr;

    gdb_objc_trampolines_changed(thdr);
}
336
337 // Vtable management.
338
// Total strlen of all vtable selector names. Computed in initVtables()
// only when vtable logging is enabled; used to size log buffers.
static size_t vtableStrlen;
// Number of vtable slots in use (0 when vtable dispatch is disabled).
static size_t vtableCount;
// Selector assigned to each vtable slot.
static SEL *vtableSelectors;
// Trampoline IMP for each vtable slot.
static IMP *vtableTrampolines;
// Selector names for the default (non-GC) vtable layout.
static const char * const defaultVtable[] = {
    "allocWithZone:",
    "alloc",
    "class",
    "self",
    "isKindOfClass:",
    "respondsToSelector:",
    "isFlipped",
    "length",
    "objectForKey:",
    "count",
    "objectAtIndex:",
    "isEqualToString:",
    "isEqual:",
    "retain",
    "release",
    "autorelease",
};
// Selector names used instead when garbage collection is enabled
// (retain/release/autorelease replaced by hash/addObject:/fast enumeration).
static const char * const defaultVtableGC[] = {
    "allocWithZone:",
    "alloc",
    "class",
    "self",
    "isKindOfClass:",
    "respondsToSelector:",
    "isFlipped",
    "length",
    "objectForKey:",
    "count",
    "objectAtIndex:",
    "isEqualToString:",
    "isEqual:",
    "hash",
    "addObject:",
    "countByEnumeratingWithState:objects:count:",
};
379
// Hand-written trampoline entry points for the first 16 vtable slots
// (defined elsewhere — presumably in assembly alongside vtable_prototype).
OBJC_EXTERN void objc_msgSend_vtable0(void);
OBJC_EXTERN void objc_msgSend_vtable1(void);
OBJC_EXTERN void objc_msgSend_vtable2(void);
OBJC_EXTERN void objc_msgSend_vtable3(void);
OBJC_EXTERN void objc_msgSend_vtable4(void);
OBJC_EXTERN void objc_msgSend_vtable5(void);
OBJC_EXTERN void objc_msgSend_vtable6(void);
OBJC_EXTERN void objc_msgSend_vtable7(void);
OBJC_EXTERN void objc_msgSend_vtable8(void);
OBJC_EXTERN void objc_msgSend_vtable9(void);
OBJC_EXTERN void objc_msgSend_vtable10(void);
OBJC_EXTERN void objc_msgSend_vtable11(void);
OBJC_EXTERN void objc_msgSend_vtable12(void);
OBJC_EXTERN void objc_msgSend_vtable13(void);
OBJC_EXTERN void objc_msgSend_vtable14(void);
OBJC_EXTERN void objc_msgSend_vtable15(void);

// Built-in trampolines, indexed by vtable slot; installed by initVtables().
static IMP const defaultVtableTrampolines[] = {
    (IMP)objc_msgSend_vtable0,
    (IMP)objc_msgSend_vtable1,
    (IMP)objc_msgSend_vtable2,
    (IMP)objc_msgSend_vtable3,
    (IMP)objc_msgSend_vtable4,
    (IMP)objc_msgSend_vtable5,
    (IMP)objc_msgSend_vtable6,
    (IMP)objc_msgSend_vtable7,
    (IMP)objc_msgSend_vtable8,
    (IMP)objc_msgSend_vtable9,
    (IMP)objc_msgSend_vtable10,
    (IMP)objc_msgSend_vtable11,
    (IMP)objc_msgSend_vtable12,
    (IMP)objc_msgSend_vtable13,
    (IMP)objc_msgSend_vtable14,
    (IMP)objc_msgSend_vtable15,
};
// gdb descriptors covering the built-in trampolines above.
extern objc_trampoline_header defaultVtableTrampolineDescriptors;
416
static void check_vtable_size(void) __unused;
// Compile-time consistency checks: a negative array length is a compile
// error, so each mismatch below fails the build. Never called at runtime.
static void check_vtable_size(void)
{
    // Fail to compile if vtable sizes don't match.
    // (Checked in both directions so any difference goes negative once.)
    int c1[sizeof(defaultVtableTrampolines)-sizeof(defaultVtable)] __unused;
    int c2[sizeof(defaultVtable)-sizeof(defaultVtableTrampolines)] __unused;
    int c3[sizeof(defaultVtableTrampolines)-sizeof(defaultVtableGC)] __unused;
    int c4[sizeof(defaultVtableGC)-sizeof(defaultVtableTrampolines)] __unused;

    // Fail to compile if vtableMax is too small.
    // Compare element COUNTS: the previous form compared vtableMax against
    // sizeof(...) in BYTES, which passed only by coincidence on LP64
    // (16 pointers * 8 bytes == 128 == vtableMax).
    int c5[vtableMax - sizeof(defaultVtable)/sizeof(defaultVtable[0])] __unused;
    int c6[vtableMax - sizeof(defaultVtableGC)/sizeof(defaultVtableGC[0])] __unused;
}
430
431
// Symbols exported by the trampoline template (presumably assembly):
// the prototype code, the ignored-selector stub, and the byte offsets of
// the patch sites inside the prototype.
extern uint8_t vtable_prototype;
extern uint8_t vtable_ignored;
extern int vtable_prototype_size;
extern int vtable_prototype_index_offset;
extern int vtable_prototype_index2_offset;
extern int vtable_prototype_tagtable_offset;
extern int vtable_prototype_tagtable_size;

// Writes a trampoline for vtable slot `index` at dst by copying the
// prototype and patching in the slot's byte offset and the tagged-isa
// table address. Returns the number of bytes written.
// Every patch site is verified against its expected placeholder bytes;
// a mismatch means the template changed, and we _objc_fatal.
static size_t makeVtableTrampoline(uint8_t *dst, size_t index)
{
    // copy boilerplate
    memcpy(dst, &vtable_prototype, vtable_prototype_size);

    // insert indexes
#if defined(__x86_64__)
    if (index > 255) _objc_fatal("vtable_prototype busted");
    {
        // `jmpq *0x7fff(%rax)`  ff a0 ff 7f
        // Replace the 0x7fff displacement placeholder with index*8
        // (byte offset of the slot in the IMP array).
        uint16_t *p = (uint16_t *)(dst + vtable_prototype_index_offset + 2);
        if (*p != 0x7fff) _objc_fatal("vtable_prototype busted");
        *p = index * 8;
    }
    {
        // Second patch site; same placeholder and encoding.
        uint16_t *p = (uint16_t *)(dst + vtable_prototype_index2_offset + 2);
        if (*p != 0x7fff) _objc_fatal("vtable_prototype busted");
        *p = index * 8;
    }
#else
# warning unknown architecture
#endif

    // insert tagged isa table
#if defined(__x86_64__)
    {
        // `movq $0x1122334455667788, %r10`  49 ba 88 77 66 55 44 33 22 11
        // Replace the 64-bit immediate placeholder with the address of
        // _objc_tagged_isa_table.
        if (vtable_prototype_tagtable_size != 10) {
            _objc_fatal("vtable_prototype busted");
        }
        uint8_t *p = (uint8_t *)(dst + vtable_prototype_tagtable_offset);
        if (*p++ != 0x49) _objc_fatal("vtable_prototype busted");
        if (*p++ != 0xba) _objc_fatal("vtable_prototype busted");
        if (*(uintptr_t *)p != 0x1122334455667788) {
            _objc_fatal("vtable_prototype busted");
        }
        uintptr_t addr = (uintptr_t)_objc_tagged_isa_table;
        memcpy(p, &addr, sizeof(addr));
    }
#else
# warning unknown architecture
#endif

    return vtable_prototype_size;
}
484
485
// One-time vtable setup: chooses the selector list (GC or not), registers
// the selectors, installs the built-in assembly trampolines for the first
// slots, and generates patched machine-code trampolines for the rest.
static void initVtables(void)
{
    if (DisableVtables) {
        if (PrintVtables) {
            _objc_inform("VTABLES: vtable dispatch disabled by OBJC_DISABLE_VTABLES");
        }
        // Empty tables: no selector is ever vtable-dispatched.
        vtableCount = 0;
        vtableSelectors = NULL;
        vtableTrampolines = NULL;
        return;
    }

    const char * const *names;
    size_t i;

    // GC and non-GC builds dispatch different hot selectors.
    if (UseGC) {
        names = defaultVtableGC;
        vtableCount = sizeof(defaultVtableGC) / sizeof(defaultVtableGC[0]);
    } else {
        names = defaultVtable;
        vtableCount = sizeof(defaultVtable) / sizeof(defaultVtable[0]);
    }
    if (vtableCount > vtableMax) vtableCount = vtableMax;

    vtableSelectors = (SEL*)_malloc_internal(vtableCount * sizeof(SEL));
    vtableTrampolines = (IMP*)_malloc_internal(vtableCount * sizeof(IMP));

    // Built-in trampolines and their descriptors

    size_t defaultVtableTrampolineCount =
        sizeof(defaultVtableTrampolines) / sizeof(defaultVtableTrampolines[0]);
#ifndef NDEBUG
    // debug: use generated code for 3/4 of the table
    // Disabled even in Debug builds to avoid breaking backtrace symbol names.
    // defaultVtableTrampolineCount /= 4;
#endif

    // First slots use the hand-written assembly trampolines.
    for (i = 0; i < defaultVtableTrampolineCount && i < vtableCount; i++) {
        vtableSelectors[i] = sel_registerName(names[i]);
        vtableTrampolines[i] = defaultVtableTrampolines[i];
    }
    appendTrampolines(&defaultVtableTrampolineDescriptors);


    // Generated trampolines and their descriptors

    if (vtableCount > defaultVtableTrampolineCount) {
        // Memory for trampoline code
        size_t generatedCount =
            vtableCount - defaultVtableTrampolineCount;

        // One page-rounded RW mapping laid out as
        // [header][descriptors][16-byte-aligned code...]; flipped to
        // read+execute once the code is written.
        const int align = 16;
        size_t codeSize =
            round_page(sizeof(objc_trampoline_header) + align +
                       generatedCount * (sizeof(objc_trampoline_descriptor)
                                         + vtable_prototype_size + align));
        // NOTE(review): mmap result is not checked for MAP_FAILED; a
        // failure here would crash on the first store below.
        void *codeAddr = mmap(0, codeSize, PROT_READ|PROT_WRITE,
                              MAP_PRIVATE|MAP_ANON,
                              VM_MAKE_TAG(VM_MEMORY_OBJC_DISPATCHERS), 0);
        uint8_t *t = (uint8_t *)codeAddr;

        // Trampoline header
        objc_trampoline_header *thdr = (objc_trampoline_header *)t;
        thdr->headerSize = sizeof(objc_trampoline_header);
        thdr->descSize = sizeof(objc_trampoline_descriptor);
        thdr->descCount = (uint32_t)generatedCount;
        thdr->next = NULL;

        // Trampoline descriptors
        objc_trampoline_descriptor *tdesc = (objc_trampoline_descriptor *)(thdr+1);
        t = (uint8_t *)&tdesc[generatedCount];
        t += align - ((uintptr_t)t % align);

        // Dispatch code
        size_t tdi;
        for (i = defaultVtableTrampolineCount, tdi = 0;
             i < vtableCount;
             i++, tdi++)
        {
            vtableSelectors[i] = sel_registerName(names[i]);
            if (ignoreSelector(vtableSelectors[i])) {
                // Ignored selectors get the do-nothing stub and an
                // empty descriptor.
                vtableTrampolines[i] = (IMP)&vtable_ignored;
                tdesc[tdi].offset = 0;
                tdesc[tdi].flags = 0;
            } else {
                vtableTrampolines[i] = (IMP)t;
                // Descriptor stores the code's offset relative to itself.
                tdesc[tdi].offset =
                    (uint32_t)((uintptr_t)t - (uintptr_t)&tdesc[tdi]);
                tdesc[tdi].flags =
                    OBJC_TRAMPOLINE_MESSAGE|OBJC_TRAMPOLINE_VTABLE;

                t += makeVtableTrampoline(t, i);
                t += align - ((uintptr_t)t % align);
            }
        }

        appendTrampolines(thdr);
        // Freshly written code: flush the icache, then drop write access.
        sys_icache_invalidate(codeAddr, codeSize);
        mprotect(codeAddr, codeSize, PROT_READ|PROT_EXEC);
    }


    if (PrintVtables) {
        for (i = 0; i < vtableCount; i++) {
            _objc_inform("VTABLES: vtable[%zu] %p %s",
                         i, vtableTrampolines[i],
                         sel_getName(vtableSelectors[i]));
        }
    }

    if (PrintVtableImages) {
        _objc_inform("VTABLE IMAGES: '#' implemented by class");
        _objc_inform("VTABLE IMAGES: '-' inherited from superclass");
        _objc_inform("VTABLE IMAGES: ' ' not implemented");
        for (i = 0; i <= vtableCount; i++) {
            char spaces[vtableCount+1+1];
            size_t j;
            for (j = 0; j < i; j++) {
                spaces[j] = '|';
            }
            spaces[j] = '\0';
            _objc_inform("VTABLE IMAGES: %s%s", spaces,
                         i<vtableCount ? sel_getName(vtableSelectors[i]) : "");
        }
    }

    if (PrintVtables || PrintVtableImages) {
        // Cache the combined selector-name length for log buffer sizing
        // (used by printVtableOverrides).
        vtableStrlen = 0;
        for (i = 0; i < vtableCount; i++) {
            vtableStrlen += strlen(sel_getName(vtableSelectors[i]));
        }
    }
}
619
620
// Slot index of sel in the vtable, or -1 if sel is not vtable-dispatched.
static int vtable_getIndex(SEL sel)
{
    // Linear scan; the table is small (at most vtableMax entries).
    for (unsigned int slot = 0; slot < vtableCount; slot++) {
        if (vtableSelectors[slot] == sel) return (int)slot;
    }
    return -1;
}
629
// YES iff sel occupies some vtable slot.
static BOOL vtable_containsSelector(SEL sel)
{
    return vtable_getIndex(sel) >= 0 ? YES : NO;
}
634
// Logs which vtable selectors cls implements itself ('#'), inherits or
// ignores ('-'), or lacks entirely (' '). Called only when PrintVtables
// or PrintVtableImages logging is enabled.
static void printVtableOverrides(class_t *cls, class_t *supercls)
{
    // One map character per vtable slot, plus a terminator.
    char overrideMap[vtableCount+1];
    unsigned int i;

    if (supercls) {
        // Room for every selector name plus ", " separators and a NUL
        // (vtableStrlen is precomputed in initVtables).
        size_t overridesBufferSize = vtableStrlen + 2*vtableCount + 1;
        char *overrides =
            (char *)_calloc_internal(overridesBufferSize, 1);
        for (i = 0; i < vtableCount; i++) {
            if (ignoreSelector(vtableSelectors[i])) {
                overrideMap[i] = '-';
                continue;
            }
            if (getMethodNoSuper_nolock(cls, vtableSelectors[i])) {
                // Implemented directly on cls.
                strlcat(overrides, sel_getName(vtableSelectors[i]), overridesBufferSize);
                strlcat(overrides, ", ", overridesBufferSize);
                overrideMap[i] = '#';
            } else if (getMethod_nolock(cls, vtableSelectors[i])) {
                // Implemented somewhere up the superclass chain.
                overrideMap[i] = '-';
            } else {
                overrideMap[i] = ' ';
            }
        }
        if (PrintVtables) {
            _objc_inform("VTABLES: %s%s implements %s",
                         getName(cls), isMetaClass(cls) ? "(meta)" : "",
                         overrides);
        }
        _free_internal(overrides);
    }
    else {
        // No superclass: every slot counts as locally implemented.
        for (i = 0; i < vtableCount; i++) {
            overrideMap[i] = '#';
        }
    }

    if (PrintVtableImages) {
        overrideMap[vtableCount] = '\0';
        _objc_inform("VTABLE IMAGES: %s %s%s", overrideMap,
                     getName(cls), isMetaClass(cls) ? "(meta)" : "");
    }
}
678
679 /***********************************************************************
680 * updateVtable
681 * Rebuilds vtable for cls, using superclass's vtable if appropriate.
682 * Assumes superclass's vtable is up to date.
683 * Does nothing to subclass vtables.
684 * Locking: runtimeLock must be held by the caller.
685 **********************************************************************/
static void updateVtable(class_t *cls, BOOL force)
{
    rwlock_assert_writing(&runtimeLock);

    // Keep default vtable until +initialize is complete.
    // Default vtable redirects to objc_msgSend, which
    // enforces +initialize locking.
    if (!force && !_class_isInitialized((Class)cls)) {
        /*
        if (PrintVtables) {
            _objc_inform("VTABLES: KEEPING DEFAULT vtable for "
                         "uninitialized class %s%s",
                         getName(cls), isMetaClass(cls) ? "(meta)" : "");
        }
        */
        return;
    }

    // Decide whether this class can share its superclass's vtable.

    class_t *supercls = getSuperclass(cls);
    BOOL needVtable = NO;
    unsigned int i;
    if (!supercls) {
        // Root classes always need a vtable
        needVtable = YES;
    }
    else if (cls->data()->flags & RW_SPECIALIZED_VTABLE) {
        // Once you have your own vtable, you never go back
        needVtable = YES;
    }
    else {
        // Share iff cls overrides none of the vtable selectors.
        for (i = 0; i < vtableCount; i++) {
            if (ignoreSelector(vtableSelectors[i])) continue;
            method_t *m = getMethodNoSuper_nolock(cls, vtableSelectors[i]);
            // assume any local implementation differs from super's
            if (m) {
                needVtable = YES;
                break;
            }
        }
    }

    // Build a vtable for this class, or not.

    if (!needVtable) {
        if (PrintVtables) {
            _objc_inform("VTABLES: USING SUPERCLASS vtable for class %s%s %p",
                         getName(cls), isMetaClass(cls) ? "(meta)" : "", cls);
        }
        // supercls is non-NULL here: a NULL superclass forced needVtable.
        cls->vtable = supercls->vtable;
    }
    else {
        if (PrintVtables) {
            _objc_inform("VTABLES: %s vtable for class %s%s %p",
                         (cls->data()->flags & RW_SPECIALIZED_VTABLE) ?
                         "UPDATING SPECIALIZED" : "CREATING SPECIALIZED",
                         getName(cls), isMetaClass(cls) ? "(meta)" : "", cls);
        }
        if (PrintVtables || PrintVtableImages) {
            printVtableOverrides(cls, supercls);
        }

        IMP *new_vtable;
        IMP *super_vtable = supercls ? supercls->vtable : &_objc_empty_vtable;
        // fixme use msgForward (instead of msgSend from empty vtable) ?

        if (cls->data()->flags & RW_SPECIALIZED_VTABLE) {
            // update cls->vtable in place
            new_vtable = cls->vtable;
            if (new_vtable == &_objc_empty_vtable) {
                // oops - our vtable is not as specialized as we thought
                // This is probably the broken memcpy of __NSCFConstantString.
                // rdar://8770551
                new_vtable = (IMP*)malloc(vtableCount * sizeof(IMP));
            }
            assert(new_vtable != &_objc_empty_vtable);
        } else {
            // make new vtable
            new_vtable = (IMP*)malloc(vtableCount * sizeof(IMP));
            changeInfo(cls, RW_SPECIALIZED_VTABLE, 0);
        }

        // Fill each slot: a local implementation wins, otherwise inherit
        // the superclass's slot (which may be objc_msgSend).
        for (i = 0; i < vtableCount; i++) {
            if (ignoreSelector(vtableSelectors[i])) {
                new_vtable[i] = (IMP)&vtable_ignored;
            } else {
                method_t *m = getMethodNoSuper_nolock(cls, vtableSelectors[i]);
                if (m) new_vtable[i] = _method_getImplementation(m);
                else new_vtable[i] = super_vtable[i];
            }
        }

        if (cls->vtable != new_vtable) {
            // don't let other threads see uninitialized parts of new_vtable
            OSMemoryBarrier();
            cls->vtable = new_vtable;
        }
    }
}
786
787 // SUPPORT_VTABLE
788 #else
789 // !SUPPORT_VTABLE
790
// No vtable dispatch on this architecture: nothing to build.
static void initVtables(void)
{
    if (!PrintVtables) return;
    _objc_inform("VTABLES: no vtables on this architecture");
}
797
// Without vtable dispatch, no selector is ever vtable-dispatched.
static BOOL vtable_containsSelector(SEL sel)
{
    (void)sel;
    return NO;
}
802
// Without vtable dispatch there is nothing to rebuild; intentionally empty.
static void updateVtable(class_t *cls, BOOL force)
{
    (void)cls;
    (void)force;
}
806
807 // !SUPPORT_VTABLE
808 #endif
809
// One unattached category plus a flag recording whether it was loaded
// from a bundle. NOTE(review): fromBundle's effect is decided at the
// attachment site — confirm there.
typedef struct {
    category_t *cat;
    BOOL fromBundle;
} category_pair_t;

// Variable-length list of pending (unattached) categories for one class.
typedef struct {
    uint32_t count;
    category_pair_t list[0]; // variable-size
} category_list;
819
// Run `code` once per method list of _cls, with _mlist bound to each list.
// A class stores either a single list (method_list) or, when
// RW_METHOD_ARRAY is set, a NULL-terminated array of lists
// (method_lists); this macro hides the difference.
// (Comments must stay outside the macro body: a // inside would
// swallow the line-continuation backslash.)
#define FOREACH_METHOD_LIST(_mlist, _cls, code)                         \
    do {                                                                \
        const method_list_t *_mlist;                                    \
        if (_cls->data()->method_lists) {                               \
            if (_cls->data()->flags & RW_METHOD_ARRAY) {                \
                method_list_t **_mlistp;                                \
                for (_mlistp=_cls->data()->method_lists; *_mlistp; _mlistp++){\
                    _mlist = *_mlistp;                                  \
                    code                                                \
                }                                                       \
            } else {                                                    \
                _mlist = _cls->data()->method_list;                     \
                code                                                    \
            }                                                           \
        }                                                               \
    } while (0)
836
// Run `code` for _cls and every realized subclass, walking depth-first
// via the firstSubclass/nextSiblingClass links. If _cls is nil, run
// `code` for every realized class and metaclass instead.
// runtimeLock must be write-locked so the links cannot change mid-walk.
#define FOREACH_REALIZED_CLASS_AND_SUBCLASS(_c, _cls, code)             \
    do {                                                                \
        rwlock_assert_writing(&runtimeLock);                            \
        class_t *_top = _cls;                                           \
        class_t *_c = _top;                                             \
        if (_c) {                                                       \
            while (1) {                                                 \
                code                                                    \
                if (_c->data()->firstSubclass) {                        \
                    _c = _c->data()->firstSubclass;                     \
                } else {                                                \
                    while (!_c->data()->nextSiblingClass  &&  _c != _top) { \
                        _c = getSuperclass(_c);                         \
                    }                                                   \
                    if (_c == _top) break;                              \
                    _c = _c->data()->nextSiblingClass;                  \
                }                                                       \
            }                                                           \
        } else {                                                        \
            /* nil means all realized classes */                        \
            NXHashTable *_classes = realizedClasses();                  \
            NXHashTable *_metaclasses = realizedMetaclasses();          \
            NXHashState _state;                                         \
            _state = NXInitHashState(_classes);                         \
            while (NXNextHashState(_classes, &_state, (void**)&_c))     \
            {                                                           \
                code                                                    \
            }                                                           \
            _state = NXInitHashState(_metaclasses);                     \
            while (NXNextHashState(_metaclasses, &_state, (void**)&_c)) \
            {                                                           \
                code                                                    \
            }                                                           \
        }                                                               \
    } while (0)
872
873
874 /*
875 Low two bits of mlist->entsize is used as the fixed-up marker.
876 PREOPTIMIZED VERSION:
877 Fixed-up method lists get entsize&3 == 3.
878 dyld shared cache sets this for method lists it preoptimizes.
879 UN-PREOPTIMIZED VERSION:
880 Fixed-up method lists get entsize&3 == 1.
881 dyld shared cache uses 3, but those aren't trusted.
882 */
883
// Marker value expected in the low two bits of a fixed-up method list's
// entsize: 3 while dyld shared cache preoptimization is trusted,
// 1 after it is disabled (see the comment block above).
static uint32_t fixed_up_method_list = 3;

// Stop trusting dyld shared cache preoptimization: from now on only
// method lists marked with 1 (set by this runtime) count as fixed up;
// cache-set markers (3) are no longer recognized.
void
disableSharedCacheOptimizations(void)
{
    fixed_up_method_list = 1;
}
891
// YES iff mlist carries the currently-trusted fixed-up marker
// (low two bits of entsize; see fixed_up_method_list above).
static BOOL isMethodListFixedUp(const method_list_t *mlist)
{
    uint32_t marker = mlist->entsize_NEVER_USE & 3;
    return marker == fixed_up_method_list;
}
896
// Stamp mlist as fixed up. Must not already be marked.
static void setMethodListFixedUp(method_list_t *mlist)
{
    rwlock_assert_writing(&runtimeLock);
    assert(!isMethodListFixedUp(mlist));
    // Clear both marker bits, then write the current marker value.
    uint32_t cleared = mlist->entsize_NEVER_USE & ~3;
    mlist->entsize_NEVER_USE = cleared | fixed_up_method_list;
}
903
904 /*
905 static size_t chained_property_list_size(const chained_property_list *plist)
906 {
907 return sizeof(chained_property_list) +
908 plist->count * sizeof(property_t);
909 }
910 */
911
// Total byte size of plist: header plus one protocol pointer per entry.
static size_t protocol_list_size(const protocol_list_t *plist)
{
    size_t entries = plist->count * sizeof(protocol_t *);
    return sizeof(protocol_list_t) + entries;
}
916
917
// low bit used by dyld shared cache
// True per-entry size of mlist, with the two marker bits stripped.
static uint32_t method_list_entsize(const method_list_t *mlist)
{
    const uint32_t markerBits = 3;
    return mlist->entsize_NEVER_USE & ~markerBits;
}
923
// Total byte size of mlist. The struct itself already contains storage
// for the first entry, hence count-1 additional entries.
static size_t method_list_size(const method_list_t *mlist)
{
    size_t extra = (mlist->count - 1) * method_list_entsize(mlist);
    return sizeof(method_list_t) + extra;
}
928
// Pointer to the i'th entry. Entries are laid out contiguously starting
// at `first`, each entsize bytes apart.
static method_t *method_list_nth(const method_list_t *mlist, uint32_t i)
{
    assert(i < mlist->count);
    char *base = (char *)&mlist->first;
    return (method_t *)(base + i * method_list_entsize(mlist));
}
934
// Entry count of mlist; a NULL list counts as empty.
static uint32_t method_list_count(const method_list_t *mlist)
{
    if (!mlist) return 0;
    return mlist->count;
}
939
// Swap entries i and j of mlist through a stack temporary sized to the
// list's exact entry size.
static void method_list_swap(method_list_t *mlist, uint32_t i, uint32_t j)
{
    size_t entsize = method_list_entsize(mlist);
    char scratch[entsize];
    method_t *a = method_list_nth(mlist, i);
    method_t *b = method_list_nth(mlist, j);
    memcpy(scratch, a, entsize);
    memcpy(a, b, entsize);
    memcpy(b, scratch, entsize);
}
948
// Index of entry m within mlist, computed from its byte offset.
static uint32_t method_list_index(const method_list_t *mlist,const method_t *m)
{
    uintptr_t offset = (uintptr_t)m - (uintptr_t)mlist;
    uint32_t i = (uint32_t)(offset / method_list_entsize(mlist));
    assert(i < mlist->count);
    return i;
}
955
956
// Total byte size of ilist; the first entry's storage is part of
// ivar_list_t itself.
static size_t ivar_list_size(const ivar_list_t *ilist)
{
    size_t tail = (ilist->count - 1) * (size_t)ilist->entsize;
    return sizeof(ivar_list_t) + tail;
}
961
// Pointer to the i'th ivar entry; entries are contiguous from `first`,
// entsize bytes apart. No bounds check here.
static ivar_t *ivar_list_nth(const ivar_list_t *ilist, uint32_t i)
{
    char *base = (char *)&ilist->first;
    return (ivar_t *)(base + i * ilist->entsize);
}
966
967
// part of ivar_t, with non-deprecated alignment
// Overlay struct used to read ivar_t's alignment field as a raw uint32_t.
// NOTE(review): the field layout must match ivar_t exactly — verify
// against ivar_t in objc-runtime-new.h.
typedef struct {
    uintptr_t *offset;
    const char *name;
    const char *type;
    uint32_t alignment;
} ivar_alignment_t;

// Returns the ivar's alignment in bytes. The stored value is
// log2(alignment); (uint32_t)-1 means "default", which maps to
// WORD_SHIFT (presumably log2 of the word size — confirm).
static uint32_t ivar_alignment(const ivar_t *ivar)
{
    uint32_t alignment = ((ivar_alignment_t *)ivar)->alignment;
    if (alignment == (uint32_t)-1) alignment = (uint32_t)WORD_SHIFT;
    return 1<<alignment;
}
982
983
// The category's class-method list (isMeta) or instance-method list.
// NULL category yields NULL.
static method_list_t *cat_method_list(const category_t *cat, BOOL isMeta)
{
    if (!cat) return NULL;
    return isMeta ? cat->classMethods : cat->instanceMethods;
}
991
// Number of class or instance methods in cat; 0 if it has no such list.
static uint32_t cat_method_count(const category_t *cat, BOOL isMeta)
{
    method_list_t *cmlist = cat_method_list(cat, isMeta);
    if (!cmlist) return 0;
    return cmlist->count;
}
997
// Returns the i-th class or instance method of cat,
// or NULL if the category has no such method list.
static method_t *cat_method_nth(const category_t *cat, BOOL isMeta, uint32_t i)
{
    method_list_t *cmlist = cat_method_list(cat, isMeta);
    return cmlist ? method_list_nth(cmlist, i) : NULL;
}
1005
1006
// Returns a pointer to the i-th entry of plist.
static property_t *
property_list_nth(const property_list_t *plist, uint32_t i)
{
    char *base = (char *)&plist->first;
    return (property_t *)(base + i * plist->entsize);
}
1012
// fixme don't chain property lists
// Singly-linked chain of property arrays. Category properties are
// prepended as new links (see buildPropertyList / remethodizeClass).
typedef struct chained_property_list {
    struct chained_property_list *next;
    uint32_t count;
    property_t list[0]; // variable-size
} chained_property_list;
1019
1020
// Frees p only if it is a live heap block.
// Pointers into mapped image data report malloc_size() == 0
// and are left untouched.
static void try_free(const void *p)
{
    if (!p) return;
    if (malloc_size(p)) free((void *)p);
}
1025
1026
/***********************************************************************
* make_ro_writeable
* Ensures rw->ro points at heap memory that may be modified, copying
* the read-only image data on first use. Idempotent.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static class_ro_t *make_ro_writeable(class_rw_t *rw)
{
    rwlock_assert_writing(&runtimeLock);

    if (!(rw->flags & RW_COPIED_RO)) {
        // Still pointing at the image's read-only data:
        // replace it with a heap copy and remember that we did.
        class_ro_t *ro = (class_ro_t *)
            _memdup_internal(rw->ro, sizeof(*rw->ro));
        rw->ro = ro;
        rw->flags |= RW_COPIED_RO;
    }
    return (class_ro_t *)rw->ro;
}
1046
1047
/***********************************************************************
* unattachedCategories
* Returns the class => categories map of unattached categories.
* The map is created lazily on first call.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static NXMapTable *unattachedCategories(void)
{
    rwlock_assert_writing(&runtimeLock);

    static NXMapTable *category_map = NULL;

    if (!category_map) {
        // fixme initial map size
        category_map = NXCreateMapTableFromZone(NXPtrValueMapPrototype, 16,
                                                _objc_internal_zone());
    }

    return category_map;
}
1067
1068
/***********************************************************************
* addUnattachedCategoryForClass
* Records an unattached category: appends cat to cls's entry in the
* unattached-categories map, growing the per-class list by one slot.
* cls may be the metaclass when the category's class methods are
* being recorded.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void addUnattachedCategoryForClass(category_t *cat, class_t *cls,
                                          header_info *catHeader)
{
    rwlock_assert_writing(&runtimeLock);

    // Remember whether the category came from an unloadable bundle;
    // selector fixup needs to copy names out of bundle memory later.
    BOOL catFromBundle = (catHeader->mhdr->filetype == MH_BUNDLE) ? YES: NO;

    // DO NOT use cat->cls! cls may be cat->cls->isa instead
    NXMapTable *cats = unattachedCategories();
    category_list *list;

    list = (category_list *)NXMapGet(cats, cls);
    if (!list) {
        // First category for this class: allocate a one-slot list.
        list = (category_list *)
            _calloc_internal(sizeof(*list) + sizeof(list->list[0]), 1);
    } else {
        // Grow by one slot. NOTE(review): overwriting list directly
        // assumes _realloc_internal never returns NULL (presumably it
        // aborts on OOM) — confirm against its definition.
        list = (category_list *)
            _realloc_internal(list, sizeof(*list) + sizeof(list->list[0]) * (list->count + 1));
    }
    list->list[list->count++] = (category_pair_t){cat, catFromBundle};
    // Re-insert: realloc may have moved the list.
    NXMapInsert(cats, cls, list);
}
1096
1097
/***********************************************************************
* removeUnattachedCategoryForClass
* Removes an unattached category. No-op if cat was never recorded
* for cls. Only the first matching entry is removed.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void removeUnattachedCategoryForClass(category_t *cat, class_t *cls)
{
    rwlock_assert_writing(&runtimeLock);

    // DO NOT use cat->cls! cls may be cat->cls->isa instead
    NXMapTable *cats = unattachedCategories();
    category_list *list;

    list = (category_list *)NXMapGet(cats, cls);
    if (!list) return;

    uint32_t i;
    for (i = 0; i < list->count; i++) {
        if (list->list[i].cat == cat) {
            // shift entries to preserve list order
            memmove(&list->list[i], &list->list[i+1],
                    (list->count-i-1) * sizeof(list->list[i]));
            list->count--;
            // The shrunken allocation stays mapped in the table;
            // it is not reallocated or re-inserted.
            return;
        }
    }
}
1125
1126
/***********************************************************************
* unattachedCategoriesForClass
* Returns the list of unattached categories for a class, and
* deletes them from the list.
* The result must be freed by the caller.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static category_list *unattachedCategoriesForClass(class_t *cls)
{
    rwlock_assert_writing(&runtimeLock);
    void *removed = NXMapRemove(unattachedCategories(), cls);
    return (category_list *)removed;
}
1139
1140
/***********************************************************************
* isRealized
* Returns YES if class cls has been realized.
* Locking: To prevent concurrent realization, hold runtimeLock.
**********************************************************************/
static BOOL isRealized(class_t *cls)
{
    if (cls->data()->flags & RW_REALIZED) return YES;
    return NO;
}
1150
1151
/***********************************************************************
* isFuture
* Returns YES if class cls is an unrealized future class.
* Locking: To prevent concurrent realization, hold runtimeLock.
**********************************************************************/
#ifndef NDEBUG
// currently used in asserts only
static BOOL isFuture(class_t *cls)
{
    if (cls->data()->flags & RW_FUTURE) return YES;
    return NO;
}
#endif
1164
1165
/***********************************************************************
* classNSObject
* Returns class NSObject.
* Uses the class symbol the compiler emits for NSObject directly,
* so no lookup table is consulted.
* Locking: none
**********************************************************************/
static class_t *classNSObject(void)
{
    extern class_t OBJC_CLASS_$_NSObject;
    return &OBJC_CLASS_$_NSObject;
}
1176
1177
/***********************************************************************
* printReplacements
* Implementation of PrintReplacedMethods / OBJC_PRINT_REPLACED_METHODS.
* Warn about methods from cats that override other methods in cats or cls.
* Assumes no methods from cats have been added to cls yet.
* Diagnostic only: logs each override, makes no changes.
**********************************************************************/
static void printReplacements(class_t *cls, category_list *cats)
{
    uint32_t c;
    BOOL isMeta = isMetaClass(cls);

    if (!cats) return;

    // Newest categories are LAST in cats
    // Later categories override earlier ones.
    for (c = 0; c < cats->count; c++) {
        category_t *cat = cats->list[c].cat;
        uint32_t cmCount = cat_method_count(cat, isMeta);
        uint32_t m;
        for (m = 0; m < cmCount; m++) {
            uint32_t c2, m2;
            method_t *meth2 = NULL;
            method_t *meth = cat_method_nth(cat, isMeta, m);
            // Selector names in unattached categories may not be
            // uniqued yet; register to get a comparable SEL.
            SEL s = sel_registerName((const char *)meth->name);

            // Don't warn about GC-ignored selectors
            if (ignoreSelector(s)) continue;

            // Look for method in earlier categories
            for (c2 = 0; c2 < c; c2++) {
                category_t *cat2 = cats->list[c2].cat;
                uint32_t cm2Count = cat_method_count(cat2, isMeta);
                for (m2 = 0; m2 < cm2Count; m2++) {
                    meth2 = cat_method_nth(cat2, isMeta, m2);
                    SEL s2 = sel_registerName((const char *)meth2->name);
                    if (s == s2) goto whine;
                }
            }

            // Look for method in cls
            FOREACH_METHOD_LIST(mlist, cls, {
                for (m2 = 0; m2 < mlist->count; m2++) {
                    meth2 = method_list_nth(mlist, m2);
                    SEL s2 = sel_registerName((const char *)meth2->name);
                    if (s == s2) goto whine;
                }
            });

            // Didn't find any override.
            continue;

        whine:
            // Found an override: meth (newer category) replaces meth2.
            logReplacedMethod(getName(cls), s, isMetaClass(cls), cat->name,
                              _method_getImplementation(meth2),
                              _method_getImplementation(meth));
        }
    }
}
1237
1238
// YES if cls's read-only data was loaded from an MH_BUNDLE image.
static BOOL isBundleClass(class_t *cls)
{
    if (cls->data()->ro->flags & RO_FROM_BUNDLE) return YES;
    return NO;
}
1243
1244
/***********************************************************************
* fixupMethodList
* Returns a heap copy of mlist with every selector name registered
* (uniqued), GC-ignored selectors redirected to a no-op IMP, and,
* if sort is set, entries sorted by selector address. The copy is
* marked fixed-up; the original list is not modified.
* bundleCopy: selector names live in unloadable bundle memory and
* must be copied when registered.
* Locking: takes sel_lock around the registration loop
* (see the fixme about lock scope).
**********************************************************************/
static method_list_t *
fixupMethodList(method_list_t *mlist, bool bundleCopy, bool sort)
{
    assert(!isMethodListFixedUp(mlist));

    // Work on a private heap copy; image data stays untouched.
    mlist = (method_list_t *)
        _memdup_internal(mlist, method_list_size(mlist));

    // fixme lock less in attachMethodLists ?
    sel_lock();

    // Unique selectors in list.
    uint32_t m;
    for (m = 0; m < mlist->count; m++) {
        method_t *meth = method_list_nth(mlist, m);
        SEL sel = sel_registerNameNoLock((const char *)meth->name, bundleCopy);
        meth->name = sel;

        if (ignoreSelector(sel)) {
            meth->imp = (IMP)&_objc_ignored_method;
        }
    }

    sel_unlock();

    // Sort by selector address.
    if (sort) {
        method_t::SortBySELAddress sorter;
        std::stable_sort(mlist->begin(), mlist->end(), sorter);
    }

    // Mark method list as uniqued and sorted
    setMethodListFixedUp(mlist);

    return mlist;
}
1281
1282
/***********************************************************************
* attachMethodLists
* Prepends the non-NULL lists in addedLists to cls's method lists,
* fixing up (copying + uniquing + sorting) any list that is not yet
* fixed up. Also updates the class's custom RR/AWZ flags and, via
* inoutVtablesAffected, reports whether any added selector appears in
* the shared vtable.
* baseMethods: the lists are the class's own compiled methods
*   (enables the root-class RR/AWZ special cases below).
* methodsFromBundle: selector names live in unloadable memory and must
*   be copied during fixup.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void
attachMethodLists(class_t *cls, method_list_t **addedLists, int addedCount,
                  BOOL baseMethods, BOOL methodsFromBundle,
                  BOOL *inoutVtablesAffected)
{
    rwlock_assert_writing(&runtimeLock);

    // Don't scan redundantly
    bool scanForCustomRR = !UseGC && !cls->hasCustomRR();
    bool scanForCustomAWZ = !UseGC && !cls->hasCustomAWZ();

    // RR special cases:
    // NSObject's base instance methods are not custom RR.
    // All other root classes are custom RR.
    // updateCustomRR_AWZ also knows about these cases.
    if (baseMethods && scanForCustomRR && cls->isRootClass()) {
        if (cls != classNSObject()) {
            cls->setHasCustomRR();
        }
        scanForCustomRR = false;
    }

    // AWZ special cases:
    // NSObject's base class methods are not custom AWZ.
    // All other root metaclasses are custom AWZ.
    // updateCustomRR_AWZ also knows about these cases.
    if (baseMethods && scanForCustomAWZ && cls->isRootMetaclass()) {
        if (cls != classNSObject()->isa) {
            cls->setHasCustomAWZ();
        }
        scanForCustomAWZ = false;
    }

    // Method list array is NULL-terminated.
    // Some elements of lists are NULL; we must filter them out.

    // Normalize the existing storage to an array view: the class either
    // holds a single inline list or a malloc'd NULL-terminated array
    // (RW_METHOD_ARRAY distinguishes them).
    method_list_t *oldBuf[2];
    method_list_t **oldLists;
    int oldCount = 0;
    if (cls->data()->flags & RW_METHOD_ARRAY) {
        oldLists = cls->data()->method_lists;
    } else {
        oldBuf[0] = cls->data()->method_list;
        oldBuf[1] = NULL;
        oldLists = oldBuf;
    }
    if (oldLists) {
        while (oldLists[oldCount]) oldCount++;
    }

    int newCount = oldCount;
    for (int i = 0; i < addedCount; i++) {
        if (addedLists[i]) newCount++;  // only non-NULL entries get added
    }

    method_list_t *newBuf[2];
    method_list_t **newLists;
    if (newCount > 1) {
        newLists = (method_list_t **)
            _malloc_internal((1 + newCount) * sizeof(*newLists));
    } else {
        newLists = newBuf;
    }

    // Add method lists to array.
    // Reallocate un-fixed method lists.
    // The new methods are PREPENDED to the method list array.

    newCount = 0;
    int i;
    for (i = 0; i < addedCount; i++) {
        method_list_t *mlist = addedLists[i];
        if (!mlist) continue;

        // Fixup selectors if necessary
        if (!isMethodListFixedUp(mlist)) {
            mlist = fixupMethodList(mlist, methodsFromBundle, true/*sort*/);
        }

        // Scan for vtable updates (stop at the first hit)
        if (inoutVtablesAffected && !*inoutVtablesAffected) {
            uint32_t m;
            for (m = 0; m < mlist->count; m++) {
                SEL sel = method_list_nth(mlist, m)->name;
                if (vtable_containsSelector(sel)) {
                    *inoutVtablesAffected = YES;
                    break;
                }
            }
        }

        // Scan for method implementations tracked by the class's flags
        // (loop ends early once both flags have been decided)
        for (uint32_t m = 0;
             (scanForCustomRR || scanForCustomAWZ) && m < mlist->count;
             m++)
        {
            SEL sel = method_list_nth(mlist, m)->name;
            if (scanForCustomRR && isRRSelector(sel)) {
                cls->setHasCustomRR();
                scanForCustomRR = false;
            } else if (scanForCustomAWZ && isAWZSelector(sel)) {
                cls->setHasCustomAWZ();
                scanForCustomAWZ = false;
            }
        }

        // Fill method list array
        newLists[newCount++] = mlist;
    }

    // Copy old methods to the method list array
    for (i = 0; i < oldCount; i++) {
        newLists[newCount++] = oldLists[i];
    }
    // Free the old malloc'd array (but never the stack buffer view).
    if (oldLists && oldLists != oldBuf) free(oldLists);

    // NULL-terminate
    newLists[newCount] = NULL;

    // Store back in whichever representation fits.
    if (newCount > 1) {
        assert(newLists != newBuf);
        cls->data()->method_lists = newLists;
        changeInfo(cls, RW_METHOD_ARRAY, 0);
    } else {
        assert(newLists == newBuf);
        cls->data()->method_list = newLists[0];
        assert(!(cls->data()->flags & RW_METHOD_ARRAY));
    }
}
1412
/***********************************************************************
* attachCategoryMethods
* Attaches the method lists of all categories in cats to cls.
* Lists are gathered newest-category-first so that, after prepending,
* later categories override earlier ones and the class's own methods.
* Locking: runtimeLock must be held by the caller
* (required by attachMethodLists).
**********************************************************************/
static void
attachCategoryMethods(class_t *cls, category_list *cats,
                      BOOL *inoutVtablesAffected)
{
    if (!cats) return;
    if (PrintReplacedMethods) printReplacements(cls, cats);

    BOOL isMeta = isMetaClass(cls);
    method_list_t **mlists = (method_list_t **)
        _malloc_internal(cats->count * sizeof(*mlists));

    // Count backwards through cats to get newest categories first
    int mcount = 0;
    int i = cats->count;
    BOOL fromBundle = NO;
    while (i--) {
        method_list_t *mlist = cat_method_list(cats->list[i].cat, isMeta);
        if (mlist) {
            mlists[mcount++] = mlist;
            // One bundle category taints the whole batch: all selector
            // names will be copied during fixup.
            fromBundle |= cats->list[i].fromBundle;
        }
    }

    attachMethodLists(cls, mlists, mcount, NO, fromBundle, inoutVtablesAffected);

    _free_internal(mlists);

}
1441
1442
/***********************************************************************
* buildPropertyList
* Builds one chained_property_list link holding the properties of
* plist plus the instance properties of every category in cats,
* copied newest-category-first so later categories take precedence.
* Returns NULL if there are no properties at all.
* The result is malloc'd; the caller owns it and links it into the
* class's property chain.
* (Class properties on metaclasses are unsupported here; see the
* commented-out branches.)
**********************************************************************/
static chained_property_list *
buildPropertyList(const property_list_t *plist, category_list *cats, BOOL isMeta)
{
    chained_property_list *newlist;
    uint32_t count = 0;
    uint32_t p, c;

    // Count properties in all lists.
    if (plist) count = plist->count;
    if (cats) {
        for (c = 0; c < cats->count; c++) {
            category_t *cat = cats->list[c].cat;
            /*
            if (isMeta && cat->classProperties) {
                count += cat->classProperties->count;
            }
            else*/
            if (!isMeta && cat->instanceProperties) {
                count += cat->instanceProperties->count;
            }
        }
    }

    if (count == 0) return NULL;

    // Allocate new list.
    newlist = (chained_property_list *)
        _malloc_internal(sizeof(*newlist) + count * sizeof(property_t));
    newlist->count = 0;
    newlist->next = NULL;

    // Copy properties; newest categories first, then ordinary properties
    if (cats) {
        c = cats->count;
        while (c--) {
            property_list_t *cplist;
            category_t *cat = cats->list[c].cat;
            /*
            if (isMeta) {
                cplist = cat->classProperties;
            } else */
            {
                cplist = cat->instanceProperties;
            }
            if (cplist) {
                for (p = 0; p < cplist->count; p++) {
                    newlist->list[newlist->count++] =
                        *property_list_nth(cplist, p);
                }
            }
        }
    }
    if (plist) {
        for (p = 0; p < plist->count; p++) {
            newlist->list[newlist->count++] = *property_list_nth(plist, p);
        }
    }

    // Every counted property must have been copied.
    assert(newlist->count == count);

    return newlist;
}
1505
1506
/***********************************************************************
* buildProtocolList
* Builds a NULL-terminated array of protocol lists from three sources:
* base (a class's own compiled protocol list), protos (an existing
* array, typically the class's current one), and the protocol lists of
* the categories in cats.
* Returns protos unchanged if neither base nor any category
* contributes a protocol list; otherwise returns a newly malloc'd
* array which the caller is responsible for freeing (the caller
* compares against the old pointer to decide — see remethodizeClass).
**********************************************************************/
static const protocol_list_t **
buildProtocolList(category_list *cats, const protocol_list_t *base,
                  const protocol_list_t **protos)
{
    const protocol_list_t **p, **newp;
    const protocol_list_t **newprotos;
    unsigned int count = 0;
    unsigned int i;

    // count protocol list in base
    if (base) count++;

    // count protocol lists in cats
    if (cats) for (i = 0; i < cats->count; i++) {
        category_t *cat = cats->list[i].cat;
        if (cat->protocols) count++;
    }

    // no base or category protocols? return existing protocols unchanged
    if (count == 0) return protos;

    // count protocol lists in protos
    // (count >= 1 here; the former `if (count == 0) return NULL;`
    // after this loop was unreachable and has been removed)
    for (p = protos; p && *p; p++) {
        count++;
    }

    newprotos = (const protocol_list_t **)
        _malloc_internal((count+1) * sizeof(protocol_list_t *));
    newp = newprotos;

    // Order: base first, then existing protocols, then category protocols.
    if (base) {
        *newp++ = base;
    }

    for (p = protos; p && *p; p++) {
        *newp++ = *p;
    }

    if (cats) for (i = 0; i < cats->count; i++) {
        category_t *cat = cats->list[i].cat;
        if (cat->protocols) {
            *newp++ = cat->protocols;
        }
    }

    // NULL-terminate
    *newp = NULL;

    return newprotos;
}
1558
1559
/***********************************************************************
* methodizeClass
* Fixes up cls's method list, protocol list, and property list.
* Attaches any outstanding categories.
* Builds vtable.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void methodizeClass(class_t *cls)
{
    category_list *cats;
    BOOL isMeta;

    rwlock_assert_writing(&runtimeLock);

    isMeta = isMetaClass(cls);

    // Methodizing for the first time
    if (PrintConnecting) {
        _objc_inform("CLASS: methodizing class '%s' %s",
                     getName(cls), isMeta ? "(meta)" : "");
    }

    // Build method and protocol and property lists.
    // Include methods and protocols and properties from categories, if any

    // baseMethods == YES: these are the class's own compiled methods.
    attachMethodLists(cls, (method_list_t **)&cls->data()->ro->baseMethods, 1,
                      YES, isBundleClass(cls), NULL);

    // Root classes get bonus method implementations if they don't have
    // them already. These apply before category replacements.

    if (cls->isRootMetaclass()) {
        // root metaclass
        addMethod(cls, SEL_initialize, (IMP)&objc_noop_imp, "", NO);
    }

    // Attach categories that were loaded before cls was realized.
    cats = unattachedCategoriesForClass(cls);
    attachCategoryMethods(cls, cats, NULL);

    if (cats || cls->data()->ro->baseProperties) {
        cls->data()->properties =
            buildPropertyList(cls->data()->ro->baseProperties, cats, isMeta);
    }

    if (cats || cls->data()->ro->baseProtocols) {
        cls->data()->protocols =
            buildProtocolList(cats, cls->data()->ro->baseProtocols, NULL);
    }

    if (PrintConnecting) {
        uint32_t i;
        if (cats) {
            for (i = 0; i < cats->count; i++) {
                _objc_inform("CLASS: attached category %c%s(%s)",
                             isMeta ? '+' : '-',
                             getName(cls), cats->list[i].cat->name);
            }
        }
    }

    // cats was removed from the map by unattachedCategoriesForClass;
    // we own it and must free it.
    if (cats) _free_internal(cats);

    // No vtable until +initialize completes
    assert(cls->vtable == &_objc_empty_vtable);

#ifndef NDEBUG
    // Debug: sanity-check all SELs; log method list contents
    FOREACH_METHOD_LIST(mlist, cls, {
        method_list_t::method_iterator iter = mlist->begin();
        method_list_t::method_iterator end = mlist->end();
        for ( ; iter != end; ++iter) {
            if (PrintConnecting) {
                _objc_inform("METHOD %c[%s %s]", isMeta ? '+' : '-',
                             getName(cls), sel_getName(iter->name));
            }
            assert(ignoreSelector(iter->name) || sel_registerName(sel_getName(iter->name))==iter->name);
        }
    });
#endif
}
1640
1641
/***********************************************************************
* remethodizeClass
* Attach outstanding categories to an existing class.
* Fixes up cls's method list, protocol list, and property list.
* Updates method caches and vtables for cls and its subclasses.
* No-op when no unattached categories are pending for cls.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void remethodizeClass(class_t *cls)
{
    category_list *cats;
    BOOL isMeta;

    rwlock_assert_writing(&runtimeLock);

    isMeta = isMetaClass(cls);

    // Re-methodizing: check for more categories
    if ((cats = unattachedCategoriesForClass(cls))) {
        chained_property_list *newproperties;
        const protocol_list_t **newprotos;

        if (PrintConnecting) {
            _objc_inform("CLASS: attaching categories to class '%s' %s",
                         getName(cls), isMeta ? "(meta)" : "");
        }

        // Update methods, properties, protocols

        BOOL vtableAffected = NO;
        attachCategoryMethods(cls, cats, &vtableAffected);

        // Category properties are prepended as a new chain link.
        newproperties = buildPropertyList(NULL, cats, isMeta);
        if (newproperties) {
            newproperties->next = cls->data()->properties;
            cls->data()->properties = newproperties;
        }

        // buildProtocolList returns the old array unchanged when no
        // category adds protocols; only free the old array if it was
        // actually replaced.
        newprotos = buildProtocolList(cats, NULL, cls->data()->protocols);
        if (cls->data()->protocols && cls->data()->protocols != newprotos) {
            _free_internal(cls->data()->protocols);
        }
        cls->data()->protocols = newprotos;

        // cats is ours now; free it.
        _free_internal(cats);

        // Update method caches and vtables
        flushCaches(cls);
        if (vtableAffected) flushVtables(cls);
    }
}
1692
1693
/***********************************************************************
* changeInfo
* Atomically sets and clears some bits in cls's info field.
* set and clear must not overlap.
* Uses a compare-and-swap retry loop so concurrent updaters of other
* bits are not clobbered.
**********************************************************************/
static void changeInfo(class_t *cls, unsigned int set, unsigned int clear)
{
    uint32_t oldf, newf;

    // Only realized (or future) classes have writable rw flags.
    assert(isFuture(cls) || isRealized(cls));

    do {
        oldf = cls->data()->flags;
        newf = (oldf | set) & ~clear;
    } while (!OSAtomicCompareAndSwap32Barrier(oldf, newf, (volatile int32_t *)&cls->data()->flags));
}
1710
1711
/***********************************************************************
* getClass
* Looks up a class by name. The class MIGHT NOT be realized.
* Locking: runtimeLock must be read- or write-locked by the caller.
**********************************************************************/

// This is a misnomer: gdb_objc_realized_classes is actually a list of
// named classes not in the dyld shared cache, whether realized or not.
NXMapTable *gdb_objc_realized_classes; // exported for debuggers in objc-gdb.h

static class_t *getClass(const char *name)
{
    rwlock_assert_locked(&runtimeLock);

    // allocated in _read_images
    assert(gdb_objc_realized_classes);

    // Try runtime-allocated table
    // (takes precedence over the shared-cache table)
    class_t *result = (class_t *)NXMapGet(gdb_objc_realized_classes, name);
    if (result) return result;

    // Try table from dyld shared cache
    return getPreoptimizedClass(name);
}
1736
1737
/***********************************************************************
* addNamedClass
* Adds name => cls to the named non-meta class map.
* Warns about duplicate class names and keeps the old mapping.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void addNamedClass(class_t *cls, const char *name)
{
    rwlock_assert_writing(&runtimeLock);
    class_t *old;
    if ((old = getClass(name))) {
        // Duplicate class name: warn and keep the first class.
        inform_duplicate(name, (Class)old, (Class)cls);
    } else {
        NXMapInsert(gdb_objc_realized_classes, name, cls);
    }
    // Metaclasses never go in the named class map.
    assert(!(cls->data()->flags & RO_META));

    // wrong: constructed classes are already realized when they get here
    // assert(!isRealized(cls));
}
1758
1759
/***********************************************************************
* removeNamedClass
* Removes cls from the name => cls map.
* If a different class owns this name (duplicate class names),
* the other class's mapping is left intact.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void removeNamedClass(class_t *cls, const char *name)
{
    rwlock_assert_writing(&runtimeLock);
    assert(!(cls->data()->flags & RO_META));
    // Only remove the entry when it actually maps to cls;
    // cls may have a name collision with another class.
    if (cls == NXMapGet(gdb_objc_realized_classes, name)) {
        NXMapRemove(gdb_objc_realized_classes, name);
    }
}
1775
1776
/***********************************************************************
* realizedClasses
* Returns the class list for realized non-meta classes.
* The hash itself is allocated in _read_images.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static NXHashTable *realized_class_hash = NULL;

static NXHashTable *realizedClasses(void)
{
    rwlock_assert_locked(&runtimeLock);

    // allocated in _read_images
    assert(realized_class_hash);

    return realized_class_hash;
}
1793
1794
/***********************************************************************
* realizedMetaclasses
* Returns the class list for realized metaclasses.
* The hash itself is allocated in _read_images.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static NXHashTable *realized_metaclass_hash = NULL;
static NXHashTable *realizedMetaclasses(void)
{
    rwlock_assert_locked(&runtimeLock);

    // allocated in _read_images
    assert(realized_metaclass_hash);

    return realized_metaclass_hash;
}
1810
1811
/***********************************************************************
* addRealizedClass
* Adds cls to the realized non-meta class hash and registers it with
* the external class registry (objc_addRegisteredClass).
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void addRealizedClass(class_t *cls)
{
    rwlock_assert_writing(&runtimeLock);
    void *old;
    old = NXHashInsert(realizedClasses(), cls);
    objc_addRegisteredClass((Class)cls);
    // Debug-only sanity checks: must be a non-meta class that was
    // not already present (insert returns the prior entry, if any).
    assert(!isMetaClass(cls));
    assert(!old);
}
1826
1827
/***********************************************************************
* removeRealizedClass
* Removes cls from the realized non-meta class hash and from the
* external class registry. No-op for unrealized classes.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void removeRealizedClass(class_t *cls)
{
    rwlock_assert_writing(&runtimeLock);
    if (!isRealized(cls)) return;

    assert(!isMetaClass(cls));
    NXHashRemove(realizedClasses(), cls);
    objc_removeRegisteredClass((Class)cls);
}
1842
1843
/***********************************************************************
* addRealizedMetaclass
* Adds cls to the realized metaclass hash.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void addRealizedMetaclass(class_t *cls)
{
    rwlock_assert_writing(&runtimeLock);
    void *old;
    old = NXHashInsert(realizedMetaclasses(), cls);
    // Debug-only sanity checks: must be a metaclass not already present.
    assert(isMetaClass(cls));
    assert(!old);
}
1857
1858
/***********************************************************************
* removeRealizedMetaclass
* Removes cls from the realized metaclass hash.
* No-op for unrealized classes.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void removeRealizedMetaclass(class_t *cls)
{
    rwlock_assert_writing(&runtimeLock);
    if (!isRealized(cls)) return;

    assert(isMetaClass(cls));
    NXHashRemove(realizedMetaclasses(), cls);
}
1872
1873
/***********************************************************************
* futureNamedClasses
* Returns the classname => future class map for unrealized future classes.
* The map is created lazily on first call.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static NXMapTable *futureNamedClasses(void)
{
    rwlock_assert_writing(&runtimeLock);

    static NXMapTable *future_named_class_map = NULL;

    if (!future_named_class_map) {
        // future_named_class_map is big enough for CF's classes and a few others
        future_named_class_map =
            NXCreateMapTableFromZone(NXStrValueMapPrototype, 32,
                                     _objc_internal_zone());
    }

    return future_named_class_map;
}
1894
1895
/***********************************************************************
* addFutureNamedClass
* Installs cls as the class structure to use for the named class if it appears.
* Gives cls fresh zeroed rw data flagged RO_FUTURE so the class is
* recognized as a future class when the real one is read from an image.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void addFutureNamedClass(const char *name, class_t *cls)
{
    void *old;

    rwlock_assert_writing(&runtimeLock);

    if (PrintFuture) {
        _objc_inform("FUTURE: reserving %p for %s", cls, name);
    }

    cls->setData((class_rw_t *)_calloc_internal(sizeof(*cls->data()), 1));
    cls->data()->flags = RO_FUTURE;

    // The map copies its keys, so name need not outlive this call.
    old = NXMapKeyCopyingInsert(futureNamedClasses(), name, cls);
    // A name may be reserved only once.
    assert(!old);
}
1917
1918
/***********************************************************************
* removeFutureNamedClass
* Removes the named class from the unrealized future class list,
* because it has been realized.
* The map owns a copy of the key string; NXMapKeyFreeingRemove
* frees that copy.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void removeFutureNamedClass(const char *name)
{
    rwlock_assert_writing(&runtimeLock);

    NXMapKeyFreeingRemove(futureNamedClasses(), name);
}
1931
1932
/***********************************************************************
* remappedClasses
* Returns the oldClass => newClass map for realized future classes.
* Returns the oldClass => NULL map for ignored weak-linked classes.
* Created lazily when create is YES; may return NULL otherwise.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static NXMapTable *remappedClasses(BOOL create)
{
    static NXMapTable *remapped_class_map = NULL;

    rwlock_assert_locked(&runtimeLock);

    if (remapped_class_map) return remapped_class_map;
    if (!create) return NULL;

    // remapped_class_map is big enough to hold CF's classes and a few others
    // INIT_ONCE_PTR installs the table atomically; if another thread
    // races and wins, our copy is destroyed via NXFreeMapTable(v).
    INIT_ONCE_PTR(remapped_class_map,
                  NXCreateMapTableFromZone(NXPtrValueMapPrototype, 32,
                                           _objc_internal_zone()),
                  NXFreeMapTable(v));

    return remapped_class_map;
}
1956
1957
/***********************************************************************
* noClassesRemapped
* Returns YES if no classes have been remapped
* (i.e. the remap table was never created).
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static BOOL noClassesRemapped(void)
{
    rwlock_assert_locked(&runtimeLock);

    return remappedClasses(NO) == NULL;
}
1970
1971
/***********************************************************************
* addRemappedClass
* newcls is a realized future class, replacing oldcls.
* OR newcls is NULL, replacing ignored weak-linked class oldcls.
* Locking: runtimeLock must be write-locked by the caller
**********************************************************************/
static void addRemappedClass(class_t *oldcls, class_t *newcls)
{
    rwlock_assert_writing(&runtimeLock);

    if (PrintFuture) {
        // Fixed: newcls is the replacement being used "instead of"
        // oldcls; the two arguments were previously swapped.
        _objc_inform("FUTURE: using %p instead of %p for %s",
                     newcls, oldcls, getName(oldcls));
    }

    void *old;
    old = NXMapInsert(remappedClasses(YES), oldcls, newcls);
    // Each old class may be remapped only once.
    assert(!old);
}
1991
1992
/***********************************************************************
* remapClass
* Returns the live class pointer for cls, which may be pointing to 
* a class struct that has been reallocated.
* Returns NULL if cls is ignored because of weak linking.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static class_t *remapClass(class_t *cls)
{
    rwlock_assert_locked(&runtimeLock);

    class_t *c2;

    if (!cls) return NULL;

    // NXMapMember distinguishes "key absent" (NX_MAPNOTAKEY) from
    // "key present with NULL value". Absent means cls was never remapped;
    // present-with-NULL means cls is an ignored weak-linked class.
    if (NXMapMember(remappedClasses(YES), cls, (void**)&c2) == NX_MAPNOTAKEY) {
        return cls;
    } else {
        return c2;
    }
}
2014
// Overload for raw classref_t values read from an image's class list.
static class_t *remapClass(classref_t cls)
{
    return remapClass((class_t *)cls);
}
2019
/***********************************************************************
* _class_remap
* Exported wrapper around remapClass().
* Returns the live class pointer for cls_gen, or nil for an ignored
* weak-linked class.
* Locking: acquires runtimeLock (read)
**********************************************************************/
Class _class_remap(Class cls_gen)
{
    rwlock_read(&runtimeLock);
    Class result = (Class)remapClass(newcls(cls_gen));
    rwlock_unlock_read(&runtimeLock);
    return result;
}
2027
/***********************************************************************
* remapClassRef
* Fix up a class ref, in case the class referenced has been reallocated
* or is an ignored weak-linked class.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static void remapClassRef(class_t **clsref)
{
    rwlock_assert_locked(&runtimeLock);

    class_t *live = remapClass(*clsref);
    // Store only when the value actually changed, so unchanged
    // refs don't dirty their memory page.
    if (live != *clsref) *clsref = live;
}
2041
2042
/***********************************************************************
* nonMetaClasses
* Returns the memoized metaclass => class map
* Used for some cases of +initialize.
* This map does not contain all classes and metaclasses. It only 
* contains memoized results from the slow path in getNonMetaClass(), 
* and classes that the slow path can't find (like objc_registerClassPair).
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
// File-scope so removeNonMetaClass() / addNonMetaClass() share the table.
static NXMapTable *nonmeta_class_map = NULL;
static NXMapTable *nonMetaClasses(void)
{
    rwlock_assert_locked(&runtimeLock);

    if (nonmeta_class_map) return nonmeta_class_map;

    // nonmeta_class_map is typically small
    // INIT_ONCE_PTR publishes the table atomically; a racing loser
    // frees its table via NXFreeMapTable(v).
    INIT_ONCE_PTR(nonmeta_class_map, 
                  NXCreateMapTableFromZone(NXPtrValueMapPrototype, 32, 
                                           _objc_internal_zone()), 
                  NXFreeMapTable(v));

    return nonmeta_class_map;
}
2067
2068
/***********************************************************************
* addNonMetaClass
* Adds metacls => cls to the memoized metaclass map
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void addNonMetaClass(class_t *cls)
{
    rwlock_assert_writing(&runtimeLock);

    // Validate the arguments BEFORE mutating the table, so a debug-build
    // assertion failure doesn't leave a bogus entry behind. (Previously
    // these asserts ran after the insert.) Release builds are unchanged
    // because assert() compiles away under NDEBUG.
    assert(isRealized(cls));
    assert(isRealized(cls->isa));
    assert(!isMetaClass(cls));
    assert(isMetaClass(cls->isa));

    void *old;
    old = NXMapInsert(nonMetaClasses(), cls->isa, cls);
    assert(!old);  // each metaclass may be memoized only once
}
2086
2087
/***********************************************************************
* removeNonMetaClass
* Removes the memoized metacls => cls entry for cls, if any.
* Locking: runtimeLock must be write-locked by the caller
**********************************************************************/
static void removeNonMetaClass(class_t *cls)
{
    rwlock_assert_writing(&runtimeLock);
    NXMapRemove(nonMetaClasses(), cls->isa);
}
2093
2094
/***********************************************************************
* getNonMetaClass
* Return the ordinary class for this class or metaclass. 
* `inst` is an instance of `cls` or a subclass thereof, or nil. 
* Non-nil inst is faster.
* Used by +initialize. 
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static class_t *getNonMetaClass(class_t *metacls, id inst)
{
    // Lookup statistics, reported when PrintInitializing is set.
    static int total, slow, memo;
    rwlock_assert_locked(&runtimeLock);

    realizeClass(metacls);

    total++;

    // return cls itself if it's already a non-meta class
    if (!isMetaClass(metacls)) return metacls;

    // metacls really is a metaclass

    // special case for root metaclass
    // where inst == inst->isa == metacls is possible
    if (metacls->isa == metacls) {
        class_t *cls = metacls->superclass;
        assert(isRealized(cls));
        assert(!isMetaClass(cls));
        assert(cls->isa == metacls);
        if (cls->isa == metacls) return cls;
    }

    // use inst if available
    if (inst) {
        class_t *cls = (class_t *)inst;
        realizeClass(cls);
        // cls may be a subclass - find the real class for metacls
        // Walk up the superclass chain until isa matches metacls.
        // realizeClass(NULL) is a safe no-op if we run off the root.
        while (cls && cls->isa != metacls) {
            cls = cls->superclass;
            realizeClass(cls);
        }
        if (cls) {
            assert(!isMetaClass(cls));
            assert(cls->isa == metacls);
            return cls;
        }
// Debug builds (NDEBUG undefined): die loudly on an inconsistent inst.
#if !NDEBUG
        _objc_fatal("cls is not an instance of metacls");
#else
        // release build: be forgiving and fall through to slow lookups
#endif
    }

    // try memoized table
    class_t *cls = (class_t *)NXMapGet(nonMetaClasses(), metacls);
    if (cls) {
        memo++;
        if (PrintInitializing) {
            _objc_inform("INITIALIZE: %d/%d (%g%%) memoized metaclass lookups",
                         memo, total, memo*100.0/total);
        }

        assert(isRealized(cls));
        assert(!isMetaClass(cls));
        assert(cls->isa == metacls);
        return cls;
    }

    // try slow lookup
    slow++;
    if (PrintInitializing) {
        _objc_inform("INITIALIZE: %d/%d (%g%%) slow metaclass lookups", 
                     slow, total, slow*100.0/total);
    }

    // Scan every class in every loaded image for one whose isa is metacls.
    for (header_info *hi = FirstHeader; hi; hi = hi->next) {
        size_t count;
        classref_t *classlist = _getObjc2ClassList(hi, &count);
        for (size_t i = 0; i < count; i++) {
            cls = remapClass(classlist[i]);
            if (cls && cls->isa == metacls) {
                // memoize result
                realizeClass(cls);
                addNonMetaClass(cls);
                return cls;
            }
        }
    }

    _objc_fatal("no class for metaclass %p", metacls);

    // Unreachable: _objc_fatal does not return. Keeps the compiler happy.
    return cls;
}
2188
2189
/***********************************************************************
* _class_getNonMetaClass
* Return the ordinary class for this class or metaclass.
* Used by +initialize.
* Locking: acquires runtimeLock
**********************************************************************/
Class _class_getNonMetaClass(Class cls_gen, id obj)
{
    rwlock_write(&runtimeLock);
    // getNonMetaClass() may realize classes, hence the write lock.
    class_t *result = getNonMetaClass(newcls(cls_gen), obj);
    assert(isRealized(result));
    rwlock_unlock_write(&runtimeLock);

    return (Class)result;
}
2206
2207
/***********************************************************************
* addSubclass
* Adds subcls as a subclass of supercls.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void addSubclass(class_t *supercls, class_t *subcls)
{
    rwlock_assert_writing(&runtimeLock);

    if (supercls && subcls) {
        assert(isRealized(supercls));
        assert(isRealized(subcls));
        // Push subcls onto the head of supercls's singly-linked
        // subclass list.
        subcls->data()->nextSiblingClass = supercls->data()->firstSubclass;
        supercls->data()->firstSubclass = subcls;

        // Propagate inherited traits from superclass to new subclass.
        if (supercls->data()->flags & RW_HAS_CXX_STRUCTORS) {
            subcls->data()->flags |= RW_HAS_CXX_STRUCTORS;
        }

        if (supercls->hasCustomRR()) {
            subcls->setHasCustomRR(true);
        }

        if (supercls->hasCustomAWZ()) {
            subcls->setHasCustomAWZ(true);
        }
    }
}
2236
2237
/***********************************************************************
* removeSubclass
* Removes subcls as a subclass of supercls.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void removeSubclass(class_t *supercls, class_t *subcls)
{
    rwlock_assert_writing(&runtimeLock);
    assert(isRealized(supercls));
    assert(isRealized(subcls));
    assert(getSuperclass(subcls) == supercls);

    // Walk the sibling list with a pointer-to-pointer so unlinking
    // works identically whether subcls is at the head or interior.
    class_t **cp;
    for (cp = &supercls->data()->firstSubclass; 
         *cp  &&  *cp != subcls; 
         cp = &(*cp)->data()->nextSiblingClass)
        ;
    assert(*cp == subcls);
    *cp = subcls->data()->nextSiblingClass;
}
2258
2259
2260
2261 /***********************************************************************
2262 * protocols
2263 * Returns the protocol name => protocol map for protocols.
2264 * Locking: runtimeLock must read- or write-locked by the caller
2265 **********************************************************************/
2266 static NXMapTable *protocols(void)
2267 {
2268 static NXMapTable *protocol_map = NULL;
2269
2270 rwlock_assert_locked(&runtimeLock);
2271
2272 INIT_ONCE_PTR(protocol_map,
2273 NXCreateMapTableFromZone(NXStrValueMapPrototype, 16,
2274 _objc_internal_zone()),
2275 NXFreeMapTable(v) );
2276
2277 return protocol_map;
2278 }
2279
2280
/***********************************************************************
* remapProtocol
* Returns the live protocol pointer for proto, which may be pointing to 
* a protocol struct that has been reallocated.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static protocol_t *remapProtocol(protocol_ref_t proto)
{
    rwlock_assert_locked(&runtimeLock);

    // Look up the canonical protocol by name; if none is registered,
    // the original pointer is already the live one.
    protocol_t *newproto = (protocol_t *)
        NXMapGet(protocols(), ((protocol_t *)proto)->name);
    return newproto ? newproto : (protocol_t *)proto;
}
2295
2296
/***********************************************************************
* remapProtocolRef
* Fix up a protocol ref, in case the protocol referenced has been reallocated.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static void remapProtocolRef(protocol_t **protoref)
{
    rwlock_assert_locked(&runtimeLock);

    protocol_t *live = remapProtocol((protocol_ref_t)*protoref);
    // Store only when the value changed, keeping untouched pages clean.
    if (live != *protoref) *protoref = live;
}
2309
2310
/***********************************************************************
* moveIvars
* Slides a class's ivars to accommodate the given superclass size.
* Also slides ivar and weak GC layouts if provided.
* Ivars are NOT compacted to compensate for a superclass that shrunk.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void moveIvars(class_ro_t *ro, uint32_t superSize, 
                      layout_bitmap *ivarBitmap, layout_bitmap *weakBitmap)
{
    rwlock_assert_writing(&runtimeLock);

    uint32_t diff;
    uint32_t i;

    // Caller guarantees the superclass grew past our current start.
    assert(superSize > ro->instanceStart);
    diff = superSize - ro->instanceStart;

    if (ro->ivars) {
        // Find maximum alignment in this class's ivars
        uint32_t maxAlignment = 1;
        for (i = 0; i < ro->ivars->count; i++) {
            ivar_t *ivar = ivar_list_nth(ro->ivars, i);
            if (!ivar->offset) continue;  // anonymous bitfield

            uint32_t alignment = ivar_alignment(ivar);
            if (alignment > maxAlignment) maxAlignment = alignment;
        }

        // Compute a slide value that preserves that alignment
        // (round diff up to a multiple of maxAlignment).
        uint32_t alignMask = maxAlignment - 1;
        if (diff & alignMask) diff = (diff + alignMask) & ~alignMask;

        // Slide all of this class's ivars en masse
        for (i = 0; i < ro->ivars->count; i++) {
            ivar_t *ivar = ivar_list_nth(ro->ivars, i);
            if (!ivar->offset) continue;  // anonymous bitfield

            uint32_t oldOffset = (uint32_t)*ivar->offset;
            uint32_t newOffset = oldOffset + diff;
            *ivar->offset = newOffset;

            if (PrintIvars) {
                _objc_inform("IVARS: offset %u -> %u for %s (size %u, align %u)", 
                             oldOffset, newOffset, ivar->name, 
                             ivar->size, ivar_alignment(ivar));
            }
        }

        // Slide GC layouts
        // Layout bitmaps are word-granular, hence the WORD_SHIFT scaling.
        uint32_t oldOffset = ro->instanceStart;
        uint32_t newOffset = ro->instanceStart + diff;

        if (ivarBitmap) {
            layout_bitmap_slide(ivarBitmap, 
                                oldOffset >> WORD_SHIFT, 
                                newOffset >> WORD_SHIFT);
        }
        if (weakBitmap) {
            layout_bitmap_slide(weakBitmap, 
                                oldOffset >> WORD_SHIFT, 
                                newOffset >> WORD_SHIFT);
        }
    }

    // ro is normally const; callers pass a writeable copy
    // (see make_ro_writeable), so casting away const here is safe.
    *(uint32_t *)&ro->instanceStart += diff;
    *(uint32_t *)&ro->instanceSize += diff;

    if (!ro->ivars) {
        // No ivars slid, but superclass changed size. 
        // Expand bitmap in preparation for layout_bitmap_splat().
        if (ivarBitmap) layout_bitmap_grow(ivarBitmap, ro->instanceSize >> WORD_SHIFT);
        if (weakBitmap) layout_bitmap_grow(weakBitmap, ro->instanceSize >> WORD_SHIFT);
    }
}
2386
2387
/***********************************************************************
* getIvar
* Look up an ivar by name.
* Returns NULL if the class declares no ivar with that name.
* Locking: runtimeLock must be read- or write-locked by the caller.
**********************************************************************/
static ivar_t *getIvar(class_t *cls, const char *name)
{
    rwlock_assert_locked(&runtimeLock);
    assert(isRealized(cls));

    const ivar_list_t *ivars = cls->data()->ro->ivars;
    if (!ivars) return NULL;

    for (uint32_t i = 0; i < ivars->count; i++) {
        ivar_t *ivar = ivar_list_nth(ivars, i);
        if (!ivar->offset) continue;  // anonymous bitfield

        // ivar->name may be NULL for anonymous bitfields etc.
        if (ivar->name  &&  strcmp(name, ivar->name) == 0) {
            return ivar;
        }
    }

    return NULL;
}
2414
/***********************************************************************
* reconcileInstanceVariables
* Non-fragile ivars: slides cls's ivar offsets, instanceStart/Size, and
* (under GC) its ivar/weak layout bitmaps to fit the realized superclass's
* current size. NOTE(review): no lock assert here; called from
* realizeClass() which write-locks runtimeLock.
**********************************************************************/
static void reconcileInstanceVariables(class_t *cls, class_t *supercls) {
    class_rw_t *rw = cls->data();
    const class_ro_t *ro = rw->ro;
    
    if (supercls) {
        // Non-fragile ivars - reconcile this class with its superclass
        // Does this really need to happen for the isMETA case?
        layout_bitmap ivarBitmap;
        layout_bitmap weakBitmap;
        BOOL layoutsChanged = NO;
        // Layout bitmaps only matter when garbage collection is on.
        BOOL mergeLayouts = UseGC;
        const class_ro_t *super_ro = supercls->data()->ro;
        
        if (DebugNonFragileIvars) {
            // Debugging: Force non-fragile ivars to slide.
            // Intended to find compiler, runtime, and program bugs.
            // If it fails with this and works without, you have a problem.
            
            // Operation: Reset everything to 0 + misalignment. 
            // Then force the normal sliding logic to push everything back.
            
            // Exceptions: root classes, metaclasses, *NSCF* classes, 
            // __CF* classes, NSConstantString, NSSimpleCString
            
            // (already know it's not root because supercls != nil)
            if (!strstr(getName(cls), "NSCF")  &&  
                0 != strncmp(getName(cls), "__CF", 4)  &&  
                0 != strcmp(getName(cls), "NSConstantString")  &&  
                0 != strcmp(getName(cls), "NSSimpleCString")) 
            {
                uint32_t oldStart = ro->instanceStart;
                uint32_t oldSize = ro->instanceSize;
                class_ro_t *ro_w = make_ro_writeable(rw);
                ro = rw->ro;
                
                // Find max ivar alignment in class.
                // default to word size to simplify ivar update
                uint32_t alignment = 1<<WORD_SHIFT;
                if (ro->ivars) {
                    uint32_t i;
                    for (i = 0; i < ro->ivars->count; i++) {
                        ivar_t *ivar = ivar_list_nth(ro->ivars, i);
                        if (ivar_alignment(ivar) > alignment) {
                            alignment = ivar_alignment(ivar);
                        }
                    }
                }
                // Shift everything down to just the misalignment;
                // the normal slide logic below pushes it back up.
                uint32_t misalignment = ro->instanceStart % alignment;
                uint32_t delta = ro->instanceStart - misalignment;
                ro_w->instanceStart = misalignment;
                ro_w->instanceSize -= delta;
                
                if (PrintIvars) {
                    _objc_inform("IVARS: DEBUG: forcing ivars for class '%s' "
                                 "to slide (instanceStart %zu -> %zu)", 
                                 getName(cls), (size_t)oldStart, 
                                 (size_t)ro->instanceStart);
                }
                
                if (ro->ivars) {
                    uint32_t i;
                    for (i = 0; i < ro->ivars->count; i++) {
                        ivar_t *ivar = ivar_list_nth(ro->ivars, i);
                        if (!ivar->offset) continue;  // anonymous bitfield
                        *ivar->offset -= delta;
                    }
                }
                
                if (mergeLayouts) {
                    // Slide the GC layout strings down by the same delta.
                    layout_bitmap layout;
                    if (ro->ivarLayout) {
                        layout = layout_bitmap_create(ro->ivarLayout, 
                                                      oldSize, oldSize, NO);
                        layout_bitmap_slide_anywhere(&layout, 
                                                     delta >> WORD_SHIFT, 0);
                        ro_w->ivarLayout = layout_string_create(layout);
                        layout_bitmap_free(layout);
                    }
                    if (ro->weakIvarLayout) {
                        layout = layout_bitmap_create(ro->weakIvarLayout, 
                                                      oldSize, oldSize, YES);
                        layout_bitmap_slide_anywhere(&layout, 
                                                     delta >> WORD_SHIFT, 0);
                        ro_w->weakIvarLayout = layout_string_create(layout);
                        layout_bitmap_free(layout);
                    }
                }
            }
        }
        
        // fixme can optimize for "class has no new ivars", etc
        // WARNING: gcc c++ sets instanceStart/Size=0 for classes with  
        //   no local ivars, but does provide a layout bitmap. 
        //   Handle that case specially so layout_bitmap_create doesn't die
        //   The other ivar sliding code below still works fine, and 
        //   the final result is a good class.
        if (ro->instanceStart == 0  &&  ro->instanceSize == 0) {
            // We can't use ro->ivarLayout because we don't know 
            // how long it is. Force a new layout to be created.
            if (PrintIvars) {
                _objc_inform("IVARS: instanceStart/Size==0 for class %s; "
                             "disregarding ivar layout", ro->name);
            }
            ivarBitmap = layout_bitmap_create_empty(super_ro->instanceSize, NO);
            weakBitmap = layout_bitmap_create_empty(super_ro->instanceSize, YES);
            layoutsChanged = YES;
        } else {
            ivarBitmap = 
                layout_bitmap_create(ro->ivarLayout, 
                                     ro->instanceSize, 
                                     ro->instanceSize, NO);
            weakBitmap = 
                layout_bitmap_create(ro->weakIvarLayout, 
                                     ro->instanceSize,
                                     ro->instanceSize, YES);
        }
        
        if (ro->instanceStart < super_ro->instanceSize) {
            // Superclass has changed size. This class's ivars must move.
            // Also slide layout bits in parallel.
            // This code is incapable of compacting the subclass to 
            //   compensate for a superclass that shrunk, so don't do that.
            if (PrintIvars) {
                _objc_inform("IVARS: sliding ivars for class %s "
                             "(superclass was %u bytes, now %u)", 
                             ro->name, ro->instanceStart, 
                             super_ro->instanceSize);
            }
            class_ro_t *ro_w = make_ro_writeable(rw);
            ro = rw->ro;
            moveIvars(ro_w, super_ro->instanceSize, 
                      mergeLayouts ? &ivarBitmap : NULL, mergeLayouts ? &weakBitmap : NULL);
            gdb_objc_class_changed((Class)cls, OBJC_CLASS_IVARS_CHANGED, ro->name);
            layoutsChanged = mergeLayouts;
        } 
        
        if (mergeLayouts) {
            // Check superclass's layout against this class's layout.
            // This needs to be done even if the superclass is not bigger.
            layout_bitmap superBitmap = layout_bitmap_create(super_ro->ivarLayout, 
                                                             super_ro->instanceSize, 
                                                             super_ro->instanceSize, NO);
            layoutsChanged |= layout_bitmap_splat(ivarBitmap, superBitmap, 
                                                  ro->instanceStart);
            layout_bitmap_free(superBitmap);
            
            // check the superclass' weak layout.
            superBitmap = layout_bitmap_create(super_ro->weakIvarLayout, 
                                               super_ro->instanceSize, 
                                               super_ro->instanceSize, YES);
            layoutsChanged |= layout_bitmap_splat(weakBitmap, superBitmap, 
                                                  ro->instanceStart);
            layout_bitmap_free(superBitmap);
        }
        
        if (layoutsChanged) {
            // Rebuild layout strings. 
            if (PrintIvars) {
                _objc_inform("IVARS: gc layout changed for class %s",
                             ro->name);
            }
            class_ro_t *ro_w = make_ro_writeable(rw);
            ro = rw->ro;
            // In the DebugNonFragileIvars path above, this function
            // allocated the old layout strings; free them before replacing.
            if (DebugNonFragileIvars) {
                try_free(ro_w->ivarLayout);
                try_free(ro_w->weakIvarLayout);
            }
            ro_w->ivarLayout = layout_string_create(ivarBitmap);
            ro_w->weakIvarLayout = layout_string_create(weakBitmap);
        }
        
        layout_bitmap_free(ivarBitmap);
        layout_bitmap_free(weakBitmap);
    }
}
2590
/***********************************************************************
* realizeClass
* Performs first-time initialization on class cls, 
* including allocating its read-write data.
* Returns the real class structure for the class. 
* Locking: runtimeLock must be write-locked by the caller
**********************************************************************/
static class_t *realizeClass(class_t *cls)
{
    rwlock_assert_writing(&runtimeLock);

    const class_ro_t *ro;
    class_rw_t *rw;
    class_t *supercls;
    class_t *metacls;
    BOOL isMeta;

    if (!cls) return NULL;
    if (isRealized(cls)) return cls;
    assert(cls == remapClass(cls));

    // Before realization, cls->data() points at the read-only data.
    ro = (const class_ro_t *)cls->data();
    if (ro->flags & RO_FUTURE) {
        // This was a future class. rw data is already allocated.
        rw = cls->data();
        ro = cls->data()->ro;
        changeInfo(cls, RW_REALIZED, RW_FUTURE);
    } else {
        // Normal class. Allocate writeable class data.
        rw = (class_rw_t *)_calloc_internal(sizeof(class_rw_t), 1);
        rw->ro = ro;
        rw->flags = RW_REALIZED;
        cls->setData(rw);
    }

    isMeta = (ro->flags & RO_META) ? YES : NO;

    rw->version = isMeta ? 7 : 0;  // old runtime went up to 6

    if (PrintConnecting) {
        _objc_inform("CLASS: realizing class '%s' %s %p %p", 
                     ro->name, isMeta ? "(meta)" : "", cls, ro);
    }

    // Realize superclass and metaclass, if they aren't already.
    // This needs to be done after RW_REALIZED is set above, for root classes.
    // (Otherwise realizing the root metaclass would recurse back into cls.)
    supercls = realizeClass(remapClass(cls->superclass));
    metacls = realizeClass(remapClass(cls->isa));

    // Check for remapped superclass and metaclass
    if (supercls != cls->superclass) {
        cls->superclass = supercls;
    }
    if (metacls != cls->isa) {
        cls->isa = metacls;
    }

    /* debug: print them all
    if (ro->ivars) {
        uint32_t i;
        for (i = 0; i < ro->ivars->count; i++) {
            ivar_t *ivar = ivar_list_nth(ro->ivars, i);
            if (!ivar->offset) continue;  // anonymous bitfield

            _objc_inform("IVARS: %s.%s (offset %u, size %u, align %u)", 
                         ro->name, ivar->name, 
                         *ivar->offset, ivar->size, ivar_alignment(ivar));
        }
    }
    */

    // Reconcile instance variable offsets / layout.
    if (!isMeta) reconcileInstanceVariables(cls, supercls);

    // Copy some flags from ro to rw
    if (ro->flags & RO_HAS_CXX_STRUCTORS) rw->flags |= RW_HAS_CXX_STRUCTORS;

    // Connect this class to its superclass's subclass lists
    if (supercls) {
        addSubclass(supercls, cls);
    }

    // Attach categories
    methodizeClass(cls);

    // Register in the realized class/metaclass tables.
    if (!isMeta) {
        addRealizedClass(cls);
    } else {
        addRealizedMetaclass(cls);
    }

    return cls;
}
2684
2685
/***********************************************************************
* missingWeakSuperclass
* Return YES if some superclass of cls was weak-linked and is missing.
**********************************************************************/
static BOOL 
missingWeakSuperclass(class_t *cls)
{
    // Iterative form of the superclass walk (was tail-recursive).
    for (;;) {
        assert(!isRealized(cls));

        if (!cls->superclass) {
            // superclass NULL. This is normal for root classes only.
            // A non-root class with no superclass lost it to weak linking.
            return (!(cls->data()->flags & RO_ROOT));
        }

        // superclass not NULL. Check whether a higher superclass is missing.
        class_t *supercls = remapClass(cls->superclass);
        assert(cls != cls->superclass);
        assert(cls != supercls);
        if (!supercls) return YES;           // remapped to nil: missing
        if (isRealized(supercls)) return NO; // realized: chain is intact
        cls = supercls;                      // keep climbing
    }
}
2708
2709
/***********************************************************************
* realizeAllClassesInImage
* Non-lazily realizes all unrealized classes in the given image.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void realizeAllClassesInImage(header_info *hi)
{
    rwlock_assert_writing(&runtimeLock);

    // Idempotent: skip images that were already fully realized.
    if (hi->allClassesRealized) return;

    size_t count;
    classref_t *classlist = _getObjc2ClassList(hi, &count);

    for (size_t i = 0; i < count; i++) {
        realizeClass(remapClass(classlist[i]));
    }

    hi->allClassesRealized = YES;
}
2732
2733
/***********************************************************************
* realizeAllClasses
* Non-lazily realizes all unrealized classes in all known images.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void realizeAllClasses(void)
{
    rwlock_assert_writing(&runtimeLock);

    for (header_info *hi = FirstHeader; hi != NULL; hi = hi->next) {
        realizeAllClassesInImage(hi);
    }
}
2748
2749
/***********************************************************************
* _objc_allocateFutureClass
* Allocate an unresolved future class for the given class name.
* Returns any existing allocation if one was already made.
* Assumes the named class doesn't exist yet.
* Locking: acquires runtimeLock
**********************************************************************/
Class _objc_allocateFutureClass(const char *name)
{
    class_t *result;

    rwlock_write(&runtimeLock);

    // Reuse an existing future class for this name, if any;
    // otherwise allocate a fresh one and register it.
    NXMapTable *future_named_class_map = futureNamedClasses();
    result = (class_t *)NXMapGet(future_named_class_map, name);
    if (!result) {
        result = (class_t *)_calloc_class(sizeof(*result));
        addFutureNamedClass(name, result);
    }

    rwlock_unlock_write(&runtimeLock);
    return (Class)result;
}
2776
2777
/***********************************************************************
* objc_setFutureClass
* Deliberate no-op in this runtime.
**********************************************************************/
void objc_setFutureClass(Class cls, const char *name)
{
    // fixme hack do nothing - NSCFString handled specially elsewhere
}
2785
2786
/***********************************************************************
* flushVtables
* Rebuilds vtables for cls and its realized subclasses. 
* If cls is Nil, all realized classes and metaclasses are touched.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void flushVtables(class_t *cls)
{
    rwlock_assert_writing(&runtimeLock);

    if (PrintVtables  &&  !cls) {
        _objc_inform("VTABLES: ### EXPENSIVE ### global vtable flush!");
    }

    // Macro iterates cls and every realized subclass (or everything
    // when cls is Nil), binding each to `c` for the body.
    FOREACH_REALIZED_CLASS_AND_SUBCLASS(c, cls, {
        updateVtable(c, NO);
    });
}
2805
2806
/***********************************************************************
* flushCaches
* Flushes caches for cls and its realized subclasses.
* Does not update vtables.
* If cls is Nil, all realized and metaclasses classes are touched.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void flushCaches(class_t *cls)
{
    rwlock_assert_writing(&runtimeLock);

    FOREACH_REALIZED_CLASS_AND_SUBCLASS(c, cls, {
        flush_cache((Class)c);
    });
}
2822
2823
/***********************************************************************
* flush_caches
* Flushes caches and rebuilds vtables for cls, its subclasses, 
* and optionally its metaclass.
* Locking: acquires runtimeLock
**********************************************************************/
void flush_caches(Class cls_gen, BOOL flush_meta)
{
    class_t *cls = newcls(cls_gen);
    rwlock_write(&runtimeLock);
    // fixme optimize vtable flushing? (only needed for vtable'd selectors)
    flushCaches(cls);
    flushVtables(cls);
    // don't flush root class's metaclass twice (it's a subclass of the root)
    if (flush_meta  &&  getSuperclass(cls)) {
        flushCaches(cls->isa);
        flushVtables(cls->isa);
    }
    rwlock_unlock_write(&runtimeLock);
}
2844
2845
/***********************************************************************
* map_images
* Process the given images which are being mapped in by dyld.
* Calls ABI-agnostic code after taking ABI-specific locks.
*
* Returns NULL on success, or an error string for dyld on failure.
* Locking: write-locks runtimeLock
**********************************************************************/
const char *
map_images(enum dyld_image_states state, uint32_t infoCount,
           const struct dyld_image_info infoList[])
{
    const char *err;

    rwlock_write(&runtimeLock);
    err = map_images_nolock(state, infoCount, infoList);
    rwlock_unlock_write(&runtimeLock);
    return err;
}
2864
2865
/***********************************************************************
* load_images
* Process +load in the given images which are being mapped in by dyld.
* Calls ABI-agnostic code after taking ABI-specific locks.
*
* Locking: write-locks runtimeLock and loadMethodLock
* (loadMethodLock is taken first and held across the +load calls;
* runtimeLock is released before calling out so +load can re-enter
* the runtime.)
**********************************************************************/
const char *
load_images(enum dyld_image_states state, uint32_t infoCount,
            const struct dyld_image_info infoList[])
{
    BOOL found;

    recursive_mutex_lock(&loadMethodLock);

    // Discover load methods
    rwlock_write(&runtimeLock);
    found = load_images_nolock(state, infoCount, infoList);
    rwlock_unlock_write(&runtimeLock);

    // Call +load methods (without runtimeLock - re-entrant)
    if (found) {
        call_load_methods();
    }

    recursive_mutex_unlock(&loadMethodLock);

    // Always report success to dyld.
    return NULL;
}
2895
2896
/***********************************************************************
* unmap_image
* Process the given image which is about to be unmapped by dyld.
* mh is mach_header instead of headerType because that's what 
* dyld_priv.h says even for 64-bit.
*
* Locking: write-locks runtimeLock and loadMethodLock
* (same lock order as load_images: loadMethodLock, then runtimeLock)
**********************************************************************/
void 
unmap_image(const struct mach_header *mh, intptr_t vmaddr_slide)
{
    recursive_mutex_lock(&loadMethodLock);
    rwlock_write(&runtimeLock);

    unmap_image_nolock(mh);

    rwlock_unlock_write(&runtimeLock);
    recursive_mutex_unlock(&loadMethodLock);
}
2916
2917
2918
2919 /***********************************************************************
2920 * _read_images
2921 * Perform initial processing of the headers in the linked
2922 * list beginning with headerList.
2923 *
2924 * Called by: map_images_nolock
2925 *
2926 * Locking: runtimeLock acquired by map_images
2927 **********************************************************************/
// Perform initial processing of the headers in hList (hCount entries):
// discover and register classes, resolve future classes, fix up class /
// selector / protocol references, realize non-lazy classes, and attach
// categories. Caller (map_images) holds runtimeLock for writing.
// NOTE: the phases below are strictly ordered; category attachment must be
// last (see comment near the end).
void _read_images(header_info **hList, uint32_t hCount)
{
    header_info *hi;
    uint32_t hIndex;
    size_t count;
    size_t i;
    class_t **resolvedFutureClasses = NULL;
    size_t resolvedFutureClassCount = 0;
    // static: preoptimization statistics accumulate across every call so
    // PrintPreopt totals cover the whole process lifetime.
    static unsigned int totalMethodLists;
    static unsigned int preoptimizedMethodLists;
    static unsigned int totalClasses;
    static unsigned int preoptimizedClasses;
    // static: one-shot guard for vtable init and global table creation.
    static BOOL doneOnce;

    rwlock_assert_writing(&runtimeLock);

// for(EACH_HEADER) iterates hList; crashlog_header_name() records the
// current image name so crash logs identify the image being processed.
#define EACH_HEADER \
    hIndex = 0;         \
    crashlog_header_name(NULL) && hIndex < hCount && (hi = hList[hIndex]) && crashlog_header_name(hi); \
    hIndex++

    if (!doneOnce) {
        doneOnce = YES;
        initVtables();

        // Count classes. Size various table based on the total.
        size_t total = 0;
        size_t unoptimizedTotal = 0;
        for (EACH_HEADER) {
            if (_getObjc2ClassList(hi, &count)) {
                total += count;
                // Shared-cache classes are preoptimized and won't go in
                // the named-classes table.
                if (!hi->inSharedCache) unoptimizedTotal += count;
            }
        }

        if (PrintConnecting) {
            _objc_inform("CLASS: found %zu classes during launch", total);
        }

        // namedClasses (NOT realizedClasses)
        // Preoptimized classes don't go in this table.
        // 4/3 is NXMapTable's load factor
        size_t namedClassesSize =
            (isPreoptimized() ? unoptimizedTotal : total) * 4 / 3;
        gdb_objc_realized_classes =
            NXCreateMapTableFromZone(NXStrValueMapPrototype, namedClassesSize,
                                     _objc_internal_zone());

        // realizedClasses and realizedMetaclasses - less than the full total
        realized_class_hash =
            NXCreateHashTableFromZone(NXPtrPrototype, total / 8, NULL,
                                      _objc_internal_zone());
        realized_metaclass_hash =
            NXCreateHashTableFromZone(NXPtrPrototype, total / 8, NULL,
                                      _objc_internal_zone());
    }


    // Discover classes. Fix up unresolved future classes. Mark bundle classes.
    NXMapTable *future_named_class_map = futureNamedClasses();

    for (EACH_HEADER) {
        bool headerIsBundle = (hi->mhdr->filetype == MH_BUNDLE);
        bool headerInSharedCache = hi->inSharedCache;

        classref_t *classlist = _getObjc2ClassList(hi, &count);
        for (i = 0; i < count; i++) {
            class_t *cls = (class_t *)classlist[i];
            const char *name = getName(cls);

            if (missingWeakSuperclass(cls)) {
                // No superclass (probably weak-linked).
                // Disavow any knowledge of this subclass.
                if (PrintConnecting) {
                    _objc_inform("CLASS: IGNORING class '%s' with "
                                 "missing weak-linked superclass", name);
                }
                // Remap to NULL so later references resolve to nil.
                addRemappedClass(cls, NULL);
                cls->superclass = NULL;
                continue;
            }

            // Check whether somebody (e.g. CF) allocated a "future" class
            // struct for this name before the image was loaded.
            class_t *newCls = NULL;
            if (NXCountMapTable(future_named_class_map) > 0) {
                newCls = (class_t *)NXMapGet(future_named_class_map, name);
                removeFutureNamedClass(name);
            }

            if (newCls) {
                // Copy class_t to future class's struct.
                // Preserve future's rw data block.
                class_rw_t *rw = newCls->data();
                memcpy(newCls, cls, sizeof(class_t));
                rw->ro = (class_ro_t *)newCls->data();
                newCls->setData(rw);

                addRemappedClass(cls, newCls);
                cls = newCls;

                // Non-lazily realize the class below.
                resolvedFutureClasses = (class_t **)
                    _realloc_internal(resolvedFutureClasses,
                                      (resolvedFutureClassCount+1)
                                      * sizeof(class_t *));
                resolvedFutureClasses[resolvedFutureClassCount++] = newCls;
            }

            totalClasses++;
            if (headerInSharedCache && isPreoptimized()) {
                // class list built in shared cache
                // fixme strict assert doesn't work because of duplicates
                // assert(cls == getClass(name));
                assert(getClass(name));
                preoptimizedClasses++;
            } else {
                addNamedClass(cls, name);
            }

            // for future reference: shared cache never contains MH_BUNDLEs
            if (headerIsBundle) {
                // Mark both the class and its metaclass as bundle-loaded.
                cls->data()->flags |= RO_FROM_BUNDLE;
                cls->isa->data()->flags |= RO_FROM_BUNDLE;
            }

            if (PrintPreopt) {
                // Count how many base method lists were pre-sorted by dyld.
                const method_list_t *mlist;
                if ((mlist = ((class_ro_t *)cls->data())->baseMethods)) {
                    totalMethodLists++;
                    if (isMethodListFixedUp(mlist)) preoptimizedMethodLists++;
                }
                if ((mlist = ((class_ro_t *)cls->isa->data())->baseMethods)) {
                    totalMethodLists++;
                    if (isMethodListFixedUp(mlist)) preoptimizedMethodLists++;
                }
            }
        }
    }

    if (PrintPreopt && totalMethodLists) {
        _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) method lists pre-sorted",
                     preoptimizedMethodLists, totalMethodLists,
                     100.0*preoptimizedMethodLists/totalMethodLists);
    }
    if (PrintPreopt && totalClasses) {
        _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) classes pre-registered",
                     preoptimizedClasses, totalClasses,
                     100.0*preoptimizedClasses/totalClasses);
    }

    // Fix up remapped classes
    // Class list and nonlazy class list remain unremapped.
    // Class refs and super refs are remapped for message dispatching.

    if (!noClassesRemapped()) {
        for (EACH_HEADER) {
            class_t **classrefs = _getObjc2ClassRefs(hi, &count);
            for (i = 0; i < count; i++) {
                remapClassRef(&classrefs[i]);
            }
            // fixme why doesn't test future1 catch the absence of this?
            classrefs = _getObjc2SuperRefs(hi, &count);
            for (i = 0; i < count; i++) {
                remapClassRef(&classrefs[i]);
            }
        }
    }


    // Fix up @selector references
    // Selector uniquing is done under sel_lock; preoptimized images whose
    // selector data is still valid are skipped.
    sel_lock();
    for (EACH_HEADER) {
        if (PrintPreopt) {
            if (sel_preoptimizationValid(hi)) {
                _objc_inform("PREOPTIMIZATION: honoring preoptimized selectors in %s",
                             hi->fname);
            }
            else if (_objcHeaderOptimizedByDyld(hi)) {
                _objc_inform("PREOPTIMIZATION: IGNORING preoptimized selectors in %s",
                             hi->fname);
            }
        }

        if (sel_preoptimizationValid(hi)) continue;

        SEL *sels = _getObjc2SelectorRefs(hi, &count);
        BOOL isBundle = hi->mhdr->filetype == MH_BUNDLE;
        for (i = 0; i < count; i++) {
            sels[i] = sel_registerNameNoLock((const char *)sels[i], isBundle);
        }
    }
    sel_unlock();

    // Discover protocols. Fix up protocol refs.
    NXMapTable *protocol_map = protocols();
    for (EACH_HEADER) {
        extern class_t OBJC_CLASS_$_Protocol;
        Class cls = (Class)&OBJC_CLASS_$_Protocol;
        assert(cls);
        protocol_t **protocols = _getObjc2ProtocolList(hi, &count);
        // fixme duplicate protocol from bundle
        for (i = 0; i < count; i++) {
            // First registration of this protocol name wins; later
            // duplicates are left alone (and logged if PrintProtocols).
            if (!NXMapGet(protocol_map, protocols[i]->name)) {
                protocols[i]->isa = cls;
                NXMapKeyCopyingInsert(protocol_map,
                                      protocols[i]->name, protocols[i]);
                if (PrintProtocols) {
                    _objc_inform("PROTOCOLS: protocol at %p is %s",
                                 protocols[i], protocols[i]->name);
                }
            } else {
                if (PrintProtocols) {
                    _objc_inform("PROTOCOLS: protocol at %p is %s (duplicate)",
                                 protocols[i], protocols[i]->name);
                }
            }
        }
    }
    for (EACH_HEADER) {
        protocol_t **protocols;
        protocols = _getObjc2ProtocolRefs(hi, &count);
        for (i = 0; i < count; i++) {
            remapProtocolRef(&protocols[i]);
        }
    }

    // Realize non-lazy classes (for +load methods and static instances)
    for (EACH_HEADER) {
        classref_t *classlist =
            _getObjc2NonlazyClassList(hi, &count);
        for (i = 0; i < count; i++) {
            realizeClass(remapClass(classlist[i]));
        }
    }

    // Realize newly-resolved future classes, in case CF manipulates them
    if (resolvedFutureClasses) {
        for (i = 0; i < resolvedFutureClassCount; i++) {
            realizeClass(resolvedFutureClasses[i]);
        }
        _free_internal(resolvedFutureClasses);
    }

    // Discover categories.
    for (EACH_HEADER) {
        category_t **catlist =
            _getObjc2CategoryList(hi, &count);
        for (i = 0; i < count; i++) {
            category_t *cat = catlist[i];
            class_t *cls = remapClass(cat->cls);

            if (!cls) {
                // Category's target class is missing (probably weak-linked).
                // Disavow any knowledge of this category.
                catlist[i] = NULL;
                if (PrintConnecting) {
                    _objc_inform("CLASS: IGNORING category \?\?\?(%s) %p with "
                                 "missing weak-linked target class",
                                 cat->name, cat);
                }
                continue;
            }

            // Process this category.
            // First, register the category with its target class.
            // Then, rebuild the class's method lists (etc) if
            // the class is realized.
            BOOL classExists = NO;
            if (cat->instanceMethods || cat->protocols
                || cat->instanceProperties)
            {
                // Instance-side payload attaches to the class itself.
                addUnattachedCategoryForClass(cat, cls, hi);
                if (isRealized(cls)) {
                    remethodizeClass(cls);
                    classExists = YES;
                }
                if (PrintConnecting) {
                    _objc_inform("CLASS: found category -%s(%s) %s",
                                 getName(cls), cat->name,
                                 classExists ? "on existing class" : "");
                }
            }

            if (cat->classMethods || cat->protocols
                /* || cat->classProperties */)
            {
                // Class-side payload attaches to the metaclass.
                addUnattachedCategoryForClass(cat, cls->isa, hi);
                if (isRealized(cls->isa)) {
                    remethodizeClass(cls->isa);
                }
                if (PrintConnecting) {
                    _objc_inform("CLASS: found category +%s(%s)",
                                 getName(cls), cat->name);
                }
            }
        }
    }

    // Category discovery MUST BE LAST to avoid potential races
    // when other threads call the new category code before
    // this thread finishes its fixups.

    // +load handled by prepare_load_methods()

    if (DebugNonFragileIvars) {
        realizeAllClasses();
    }

#undef EACH_HEADER
}
3237
3238
3239 /***********************************************************************
3240 * prepare_load_methods
3241 * Schedule +load for classes in this image, any un-+load-ed
3242 * superclasses in other images, and any categories in this image.
3243 **********************************************************************/
3244 // Recursively schedule +load for cls and any un-+load-ed superclasses.
3245 // cls must already be connected.
3246 static void schedule_class_load(class_t *cls)
3247 {
3248 if (!cls) return;
3249 assert(isRealized(cls)); // _read_images should realize
3250
3251 if (cls->data()->flags & RW_LOADED) return;
3252
3253 // Ensure superclass-first ordering
3254 schedule_class_load(getSuperclass(cls));
3255
3256 add_class_to_loadable_list((Class)cls);
3257 changeInfo(cls, RW_LOADED, 0);
3258 }
3259
3260 void prepare_load_methods(header_info *hi)
3261 {
3262 size_t count, i;
3263
3264 rwlock_assert_writing(&runtimeLock);
3265
3266 classref_t *classlist =
3267 _getObjc2NonlazyClassList(hi, &count);
3268 for (i = 0; i < count; i++) {
3269 schedule_class_load(remapClass(classlist[i]));
3270 }
3271
3272 category_t **categorylist = _getObjc2NonlazyCategoryList(hi, &count);
3273 for (i = 0; i < count; i++) {
3274 category_t *cat = categorylist[i];
3275 class_t *cls = remapClass(cat->cls);
3276 if (!cls) continue; // category for ignored weak-linked class
3277 realizeClass(cls);
3278 assert(isRealized(cls->isa));
3279 add_category_to_loadable_list((Category)cat);
3280 }
3281 }
3282
3283
3284 /***********************************************************************
3285 * _unload_image
3286 * Only handles MH_BUNDLE for now.
3287 * Locking: write-lock and loadMethodLock acquired by unmap_image
3288 **********************************************************************/
// Unload all categories and classes defined by image hi.
// Only handles MH_BUNDLE for now.
// Locking: write-lock and loadMethodLock acquired by unmap_image.
void _unload_image(header_info *hi)
{
    size_t count, i;

    recursive_mutex_assert_locked(&loadMethodLock);
    rwlock_assert_writing(&runtimeLock);

    // Unload unattached categories and categories waiting for +load.

    category_t **catlist = _getObjc2CategoryList(hi, &count);
    for (i = 0; i < count; i++) {
        category_t *cat = catlist[i];
        // NULL entries were cleared by _read_images (category whose
        // weak-linked target class is missing).
        if (!cat) continue;  // category for ignored weak-linked class
        class_t *cls = remapClass(cat->cls);
        assert(cls);  // shouldn't have live category for dead class

        // fixme for MH_DYLIB cat's class may have been unloaded already

        // unattached list
        removeUnattachedCategoryForClass(cat, cls);

        // +load queue
        remove_category_from_loadable_list((Category)cat);
    }

    // Unload classes.

    classref_t *classlist = _getObjc2ClassList(hi, &count);

    // First detach classes from each other. Then free each class.
    // This avoid bugs where this loop unloads a subclass before its superclass

    for (i = 0; i < count; i++) {
        class_t *cls = remapClass(classlist[i]);
        if (cls) {
            remove_class_from_loadable_list((Class)cls);
            // Detach the metaclass first, then the class itself.
            detach_class(cls->isa, YES);
            detach_class(cls, NO);
        }
    }

    // Second pass: everything is detached, now safe to free.
    for (i = 0; i < count; i++) {
        class_t *cls = remapClass(classlist[i]);
        if (cls) {
            free_class(cls->isa);
            free_class(cls);
        }
    }

    // XXX FIXME -- Clean up protocols:
    // <rdar://problem/9033191> Support unloading protocols at dylib/image unload time

    // fixme DebugUnload
}
3343
3344
3345 /***********************************************************************
3346 * method_getDescription
3347 * Returns a pointer to this method's objc_method_description.
3348 * Locking: none
3349 **********************************************************************/
3350 struct objc_method_description *
3351 method_getDescription(Method m)
3352 {
3353 if (!m) return NULL;
3354 return (struct objc_method_description *)newmethod(m);
3355 }
3356
3357
3358 /***********************************************************************
3359 * method_getImplementation
3360 * Returns this method's IMP.
3361 * Locking: none
3362 **********************************************************************/
3363 static IMP
3364 _method_getImplementation(method_t *m)
3365 {
3366 if (!m) return NULL;
3367 return m->imp;
3368 }
3369
3370 IMP
3371 method_getImplementation(Method m)
3372 {
3373 return _method_getImplementation(newmethod(m));
3374 }
3375
3376
3377 /***********************************************************************
3378 * method_getName
3379 * Returns this method's selector.
3380 * The method must not be NULL.
3381 * The method must already have been fixed-up.
3382 * Locking: none
3383 **********************************************************************/
3384 SEL
3385 method_getName(Method m_gen)
3386 {
3387 method_t *m = newmethod(m_gen);
3388 if (!m) return NULL;
3389
3390 assert((SEL)m->name == sel_registerName((char *)m->name));
3391 return (SEL)m->name;
3392 }
3393
3394
3395 /***********************************************************************
3396 * method_getTypeEncoding
3397 * Returns this method's old-style type encoding string.
3398 * The method must not be NULL.
3399 * Locking: none
3400 **********************************************************************/
3401 const char *
3402 method_getTypeEncoding(Method m)
3403 {
3404 if (!m) return NULL;
3405 return newmethod(m)->types;
3406 }
3407
3408
3409 /***********************************************************************
3410 * method_setImplementation
3411 * Sets this method's implementation to imp.
3412 * The previous implementation is returned.
3413 **********************************************************************/
3414 static IMP
3415 _method_setImplementation(class_t *cls, method_t *m, IMP imp)
3416 {
3417 rwlock_assert_writing(&runtimeLock);
3418
3419 if (!m) return NULL;
3420 if (!imp) return NULL;
3421
3422 if (ignoreSelector(m->name)) {
3423 // Ignored methods stay ignored
3424 return m->imp;
3425 }
3426
3427 IMP old = _method_getImplementation(m);
3428 m->imp = imp;
3429
3430 // No cache flushing needed - cache contains Methods not IMPs.
3431
3432 // vtable and RR/AWZ updates are slow if cls is NULL (i.e. unknown)
3433 // fixme build list of classes whose Methods are known externally?
3434
3435 if (vtable_containsSelector(m->name)) {
3436 flushVtables(cls);
3437 }
3438
3439 // Catch changes to retain/release and allocWithZone implementations
3440 updateCustomRR_AWZ(cls, m);
3441
3442 // fixme update monomorphism if necessary
3443
3444 return old;
3445 }
3446
3447 IMP
3448 method_setImplementation(Method m, IMP imp)
3449 {
3450 // Don't know the class - will be slow if vtables are affected
3451 // fixme build list of classes whose Methods are known externally?
3452 IMP result;
3453 rwlock_write(&runtimeLock);
3454 result = _method_setImplementation(Nil, newmethod(m), imp);
3455 rwlock_unlock_write(&runtimeLock);
3456 return result;
3457 }
3458
3459
3460 void method_exchangeImplementations(Method m1_gen, Method m2_gen)
3461 {
3462 method_t *m1 = newmethod(m1_gen);
3463 method_t *m2 = newmethod(m2_gen);
3464 if (!m1 || !m2) return;
3465
3466 rwlock_write(&runtimeLock);
3467
3468 if (ignoreSelector(m1->name) || ignoreSelector(m2->name)) {
3469 // Ignored methods stay ignored. Now they're both ignored.
3470 m1->imp = (IMP)&_objc_ignored_method;
3471 m2->imp = (IMP)&_objc_ignored_method;
3472 rwlock_unlock_write(&runtimeLock);
3473 return;
3474 }
3475
3476 IMP m1_imp = m1->imp;
3477 m1->imp = m2->imp;
3478 m2->imp = m1_imp;
3479
3480 // vtable and RR/AWZ updates are slow because class is unknown
3481 // fixme build list of classes whose Methods are known externally?
3482
3483 if (vtable_containsSelector(m1->name) ||
3484 vtable_containsSelector(m2->name))
3485 {
3486 // Don't know the class - will be slow if vtables are affected
3487 // fixme build list of classes whose Methods are known externally?
3488 flushVtables(NULL);
3489 }
3490
3491 updateCustomRR_AWZ(nil, m1);
3492 updateCustomRR_AWZ(nil, m2);
3493
3494 // fixme update monomorphism if necessary
3495
3496 rwlock_unlock_write(&runtimeLock);
3497 }
3498
3499
3500 /***********************************************************************
3501 * ivar_getOffset
3502 * fixme
3503 * Locking: none
3504 **********************************************************************/
3505 ptrdiff_t
3506 ivar_getOffset(Ivar ivar)
3507 {
3508 if (!ivar) return 0;
3509 return *newivar(ivar)->offset;
3510 }
3511
3512
3513 /***********************************************************************
3514 * ivar_getName
3515 * fixme
3516 * Locking: none
3517 **********************************************************************/
3518 const char *
3519 ivar_getName(Ivar ivar)
3520 {
3521 if (!ivar) return NULL;
3522 return newivar(ivar)->name;
3523 }
3524
3525
3526 /***********************************************************************
3527 * ivar_getTypeEncoding
3528 * fixme
3529 * Locking: none
3530 **********************************************************************/
3531 const char *
3532 ivar_getTypeEncoding(Ivar ivar)
3533 {
3534 if (!ivar) return NULL;
3535 return newivar(ivar)->type;
3536 }
3537
3538
3539
3540 const char *property_getName(objc_property_t prop)
3541 {
3542 return newproperty(prop)->name;
3543 }
3544
3545 const char *property_getAttributes(objc_property_t prop)
3546 {
3547 return newproperty(prop)->attributes;
3548 }
3549
3550 objc_property_attribute_t *property_copyAttributeList(objc_property_t prop,
3551 unsigned int *outCount)
3552 {
3553 if (!prop) {
3554 if (outCount) *outCount = 0;
3555 return NULL;
3556 }
3557
3558 objc_property_attribute_t *result;
3559 rwlock_read(&runtimeLock);
3560 result = copyPropertyAttributeList(newproperty(prop)->attributes,outCount);
3561 rwlock_unlock_read(&runtimeLock);
3562 return result;
3563 }
3564
3565 char * property_copyAttributeValue(objc_property_t prop, const char *name)
3566 {
3567 if (!prop || !name || *name == '\0') return NULL;
3568
3569 char *result;
3570 rwlock_read(&runtimeLock);
3571 result = copyPropertyAttributeValue(newproperty(prop)->attributes, name);
3572 rwlock_unlock_read(&runtimeLock);
3573 return result;
3574 }
3575
3576
3577 /***********************************************************************
3578 * getExtendedTypesIndexesForMethod
3579 * Returns:
3580 * a is the count of methods in all method lists before m's method list
3581 * b is the index of m in m's method list
3582 * a+b is the index of m's extended types in the extended types array
3583 **********************************************************************/
3584 static void getExtendedTypesIndexesForMethod(protocol_t *proto, const method_t *m, BOOL isRequiredMethod, BOOL isInstanceMethod, uint32_t& a, uint32_t &b)
3585 {
3586 a = 0;
3587
3588 if (isRequiredMethod && isInstanceMethod) {
3589 b = method_list_index(proto->instanceMethods, m);
3590 return;
3591 }
3592 a += method_list_count(proto->instanceMethods);
3593
3594 if (isRequiredMethod && !isInstanceMethod) {
3595 b = method_list_index(proto->classMethods, m);
3596 return;
3597 }
3598 a += method_list_count(proto->classMethods);
3599
3600 if (!isRequiredMethod && isInstanceMethod) {
3601 b = method_list_index(proto->optionalInstanceMethods, m);
3602 return;
3603 }
3604 a += method_list_count(proto->optionalInstanceMethods);
3605
3606 if (!isRequiredMethod && !isInstanceMethod) {
3607 b = method_list_index(proto->optionalClassMethods, m);
3608 return;
3609 }
3610 a += method_list_count(proto->optionalClassMethods);
3611 }
3612
3613
3614 /***********************************************************************
3615 * getExtendedTypesIndexForMethod
3616 * Returns the index of m's extended types in proto's extended types array.
3617 **********************************************************************/
3618 static uint32_t getExtendedTypesIndexForMethod(protocol_t *proto, const method_t *m, BOOL isRequiredMethod, BOOL isInstanceMethod)
3619 {
3620 uint32_t a;
3621 uint32_t b;
3622 getExtendedTypesIndexesForMethod(proto, m, isRequiredMethod,
3623 isInstanceMethod, a, b);
3624 return a + b;
3625 }
3626
3627
3628 /***********************************************************************
3629 * _protocol_getMethod_nolock
3630 * Locking: runtimeLock must be write-locked by the caller
3631 **********************************************************************/
// Looks up sel in the selected method list of proto (and, if recursive,
// in its incorporated protocols). Fixes up the method list on first use;
// when the protocol has extended method types, the method list and the
// extended-types array must be sorted together so their indexes stay
// parallel.
// Locking: runtimeLock must be write-locked by the caller
static method_t *
_protocol_getMethod_nolock(protocol_t *proto, SEL sel,
                           BOOL isRequiredMethod, BOOL isInstanceMethod,
                           BOOL recursive)
{
    rwlock_assert_writing(&runtimeLock);

    if (!proto || !sel) return NULL;

    method_list_t **mlistp = NULL;

    // Select the list matching the required/optional, instance/class flags.
    if (isRequiredMethod) {
        if (isInstanceMethod) {
            mlistp = &proto->instanceMethods;
        } else {
            mlistp = &proto->classMethods;
        }
    } else {
        if (isInstanceMethod) {
            mlistp = &proto->optionalInstanceMethods;
        } else {
            mlistp = &proto->optionalClassMethods;
        }
    }

    if (*mlistp) {
        method_list_t *mlist = *mlistp;
        if (!isMethodListFixedUp(mlist)) {
            bool hasExtendedMethodTypes = proto->hasExtendedMethodTypes();
            // If extended types exist, suppress fixupMethodList's sort so
            // we can co-sort the list and the types array below.
            mlist = fixupMethodList(mlist, true/*always copy for simplicity*/,
                                    !hasExtendedMethodTypes/*sort if no ext*/);
            *mlistp = mlist;

            if (hasExtendedMethodTypes) {
                // Sort method list and extended method types together.
                // fixupMethodList() can't do this.
                // fixme COW stomp
                uint32_t count = method_list_count(mlist);
                // prefix = number of extended-types entries belonging to
                // lists stored before this one; unused = index of entry 0.
                uint32_t prefix;
                uint32_t unused;
                getExtendedTypesIndexesForMethod(proto, method_list_nth(mlist, 0), isRequiredMethod, isInstanceMethod, prefix, unused);
                const char **types = proto->extendedMethodTypes;
                // Selection-style sort by selector address, swapping the
                // parallel types entry alongside each method swap.
                for (uint32_t i = 0; i < count; i++) {
                    for (uint32_t j = i+1; j < count; j++) {
                        method_t *mi = method_list_nth(mlist, i);
                        method_t *mj = method_list_nth(mlist, j);
                        if (mi->name > mj->name) {
                            method_list_swap(mlist, i, j);
                            std::swap(types[prefix+i], types[prefix+j]);
                        }
                    }
                }
            }
        }

        method_t *m = search_method_list(mlist, sel);
        if (m) return m;
    }

    // Not found locally; optionally search incorporated protocols.
    if (recursive && proto->protocols) {
        method_t *m;
        for (uint32_t i = 0; i < proto->protocols->count; i++) {
            protocol_t *realProto = remapProtocol(proto->protocols->list[i]);
            m = _protocol_getMethod_nolock(realProto, sel,
                                           isRequiredMethod, isInstanceMethod,
                                           true);
            if (m) return m;
        }
    }

    return NULL;
}
3704
3705
3706 /***********************************************************************
3707 * _protocol_getMethod
3708 * fixme
3709 * Locking: write-locks runtimeLock
3710 **********************************************************************/
3711 Method
3712 _protocol_getMethod(Protocol *p, SEL sel, BOOL isRequiredMethod, BOOL isInstanceMethod, BOOL recursive)
3713 {
3714 rwlock_write(&runtimeLock);
3715 method_t *result = _protocol_getMethod_nolock(newprotocol(p), sel,
3716 isRequiredMethod,
3717 isInstanceMethod,
3718 recursive);
3719 rwlock_unlock_write(&runtimeLock);
3720 return (Method)result;
3721 }
3722
3723
3724 /***********************************************************************
3725 * _protocol_getMethodTypeEncoding_nolock
3726 * Return the @encode string for the requested protocol method.
3727 * Returns NULL if the compiler did not emit any extended @encode data.
3728 * Locking: runtimeLock must be held for writing by the caller
3729 **********************************************************************/
3730 const char *
3731 _protocol_getMethodTypeEncoding_nolock(protocol_t *proto, SEL sel,
3732 BOOL isRequiredMethod,
3733 BOOL isInstanceMethod)
3734 {
3735 rwlock_assert_writing(&runtimeLock);
3736
3737 if (!proto) return NULL;
3738 if (!proto->hasExtendedMethodTypes()) return NULL;
3739
3740 method_t *m =
3741 _protocol_getMethod_nolock(proto, sel,
3742 isRequiredMethod, isInstanceMethod, false);
3743 if (m) {
3744 uint32_t i = getExtendedTypesIndexForMethod(proto, m,
3745 isRequiredMethod,
3746 isInstanceMethod);
3747 return proto->extendedMethodTypes[i];
3748 }
3749
3750 // No method with that name. Search incorporated protocols.
3751 if (proto->protocols) {
3752 for (uintptr_t i = 0; i < proto->protocols->count; i++) {
3753 const char *enc =
3754 _protocol_getMethodTypeEncoding_nolock(remapProtocol(proto->protocols->list[i]), sel, isRequiredMethod, isInstanceMethod);
3755 if (enc) return enc;
3756 }
3757 }
3758
3759 return NULL;
3760 }
3761
3762 /***********************************************************************
3763 * _protocol_getMethodTypeEncoding
3764 * Return the @encode string for the requested protocol method.
3765 * Returns NULL if the compiler did not emit any extended @encode data.
3766 * Locking: runtimeLock must not be held by the caller
3767 **********************************************************************/
3768 const char *
3769 _protocol_getMethodTypeEncoding(Protocol *proto_gen, SEL sel,
3770 BOOL isRequiredMethod, BOOL isInstanceMethod)
3771 {
3772 const char *enc;
3773 rwlock_write(&runtimeLock);
3774 enc = _protocol_getMethodTypeEncoding_nolock(newprotocol(proto_gen), sel,
3775 isRequiredMethod,
3776 isInstanceMethod);
3777 rwlock_unlock_write(&runtimeLock);
3778 return enc;
3779 }
3780
3781 /***********************************************************************
3782 * protocol_getName
3783 * Returns the name of the given protocol.
3784 * Locking: runtimeLock must not be held by the caller
3785 **********************************************************************/
3786 const char *
3787 protocol_getName(Protocol *proto)
3788 {
3789 return newprotocol(proto)->name;
3790 }
3791
3792
3793 /***********************************************************************
3794 * protocol_getInstanceMethodDescription
3795 * Returns the description of a named instance method.
3796 * Locking: runtimeLock must not be held by the caller
3797 **********************************************************************/
3798 struct objc_method_description
3799 protocol_getMethodDescription(Protocol *p, SEL aSel,
3800 BOOL isRequiredMethod, BOOL isInstanceMethod)
3801 {
3802 Method m =
3803 _protocol_getMethod(p, aSel, isRequiredMethod, isInstanceMethod, true);
3804 if (m) return *method_getDescription(m);
3805 else return (struct objc_method_description){NULL, NULL};
3806 }
3807
3808
3809 /***********************************************************************
3810 * _protocol_conformsToProtocol_nolock
3811 * Returns YES if self conforms to other.
3812 * Locking: runtimeLock must be held by the caller.
3813 **********************************************************************/
3814 static BOOL _protocol_conformsToProtocol_nolock(protocol_t *self, protocol_t *other)
3815 {
3816 if (!self || !other) {
3817 return NO;
3818 }
3819
3820 if (0 == strcmp(self->name, other->name)) {
3821 return YES;
3822 }
3823
3824 if (self->protocols) {
3825 uintptr_t i;
3826 for (i = 0; i < self->protocols->count; i++) {
3827 protocol_t *proto = remapProtocol(self->protocols->list[i]);
3828 if (0 == strcmp(other->name, proto->name)) {
3829 return YES;
3830 }
3831 if (_protocol_conformsToProtocol_nolock(proto, other)) {
3832 return YES;
3833 }
3834 }
3835 }
3836
3837 return NO;
3838 }
3839
3840
3841 /***********************************************************************
3842 * protocol_conformsToProtocol
3843 * Returns YES if self conforms to other.
3844 * Locking: acquires runtimeLock
3845 **********************************************************************/
3846 BOOL protocol_conformsToProtocol(Protocol *self, Protocol *other)
3847 {
3848 BOOL result;
3849 rwlock_read(&runtimeLock);
3850 result = _protocol_conformsToProtocol_nolock(newprotocol(self),
3851 newprotocol(other));
3852 rwlock_unlock_read(&runtimeLock);
3853 return result;
3854 }
3855
3856
3857 /***********************************************************************
3858 * protocol_isEqual
3859 * Return YES if two protocols are equal (i.e. conform to each other)
3860 * Locking: acquires runtimeLock
3861 **********************************************************************/
3862 BOOL protocol_isEqual(Protocol *self, Protocol *other)
3863 {
3864 if (self == other) return YES;
3865 if (!self || !other) return NO;
3866
3867 if (!protocol_conformsToProtocol(self, other)) return NO;
3868 if (!protocol_conformsToProtocol(other, self)) return NO;
3869
3870 return YES;
3871 }
3872
3873
3874 /***********************************************************************
3875 * protocol_copyMethodDescriptionList
3876 * Returns descriptions of a protocol's methods.
3877 * Locking: acquires runtimeLock
3878 **********************************************************************/
3879 struct objc_method_description *
3880 protocol_copyMethodDescriptionList(Protocol *p,
3881 BOOL isRequiredMethod,BOOL isInstanceMethod,
3882 unsigned int *outCount)
3883 {
3884 protocol_t *proto = newprotocol(p);
3885 struct objc_method_description *result = NULL;
3886 unsigned int count = 0;
3887
3888 if (!proto) {
3889 if (outCount) *outCount = 0;
3890 return NULL;
3891 }
3892
3893 rwlock_read(&runtimeLock);
3894
3895 method_list_t *mlist = NULL;
3896
3897 if (isRequiredMethod) {
3898 if (isInstanceMethod) {
3899 mlist = proto->instanceMethods;
3900 } else {
3901 mlist = proto->classMethods;
3902 }
3903 } else {
3904 if (isInstanceMethod) {
3905 mlist = proto->optionalInstanceMethods;
3906 } else {
3907 mlist = proto->optionalClassMethods;
3908 }
3909 }
3910
3911 if (mlist) {
3912 unsigned int i;
3913 count = mlist->count;
3914 result = (struct objc_method_description *)
3915 calloc(count + 1, sizeof(struct objc_method_description));
3916 for (i = 0; i < count; i++) {
3917 method_t *m = method_list_nth(mlist, i);
3918 result[i].name = sel_registerName((const char *)m->name);
3919 result[i].types = (char *)m->types;
3920 }
3921 }
3922
3923 rwlock_unlock_read(&runtimeLock);
3924
3925 if (outCount) *outCount = count;
3926 return result;
3927 }
3928
3929
3930 /***********************************************************************
3931 * protocol_getProperty
3932 * fixme
3933 * Locking: acquires runtimeLock
3934 **********************************************************************/
static property_t *
_protocol_getProperty_nolock(protocol_t *proto, const char *name, 
                             BOOL isRequiredProperty, BOOL isInstanceProperty)
{
    // Only required instance properties are representable in protocol_t;
    // any other combination can never match.
    if (!isRequiredProperty  ||  !isInstanceProperty) return NULL;

    // Search this protocol's own property list first.
    property_list_t *props = proto->instanceProperties;
    if (props) {
        uint32_t idx;
        for (idx = 0; idx < props->count; idx++) {
            property_t *candidate = property_list_nth(props, idx);
            if (strcmp(candidate->name, name) == 0) {
                return candidate;
            }
        }
    }

    // Not found locally: recurse into incorporated protocols, depth-first.
    if (proto->protocols) {
        uintptr_t idx;
        for (idx = 0; idx < proto->protocols->count; idx++) {
            protocol_t *sub = remapProtocol(proto->protocols->list[idx]);
            property_t *found = 
                _protocol_getProperty_nolock(sub, name, 
                                             isRequiredProperty, 
                                             isInstanceProperty);
            if (found) return found;
        }
    }

    return NULL;
}
3969
objc_property_t protocol_getProperty(Protocol *p, const char *name, 
                                     BOOL isRequiredProperty, BOOL isInstanceProperty)
{
    // A NULL protocol or name can never match anything.
    if (!p  ||  !name) return NULL;

    rwlock_read(&runtimeLock);
    property_t *found = 
        _protocol_getProperty_nolock(newprotocol(p), name, 
                                     isRequiredProperty, 
                                     isInstanceProperty);
    rwlock_unlock_read(&runtimeLock);

    return (objc_property_t)found;
}
3985
3986
3987 /***********************************************************************
3988 * protocol_copyPropertyList
3989 * fixme
3990 * Locking: acquires runtimeLock
3991 **********************************************************************/
// Copy a property list into a newly malloc'd, NULL-terminated array of
// property_t pointers. Returns NULL (and *outCount = 0) for an empty or
// missing list. Caller frees the array; the entries are not copied.
static property_t **
copyPropertyList(property_list_t *plist, unsigned int *outCount)
{
    unsigned int total = plist ? plist->count : 0;
    property_t **array = NULL;

    if (total > 0) {
        // One extra slot for the NULL terminator.
        array = (property_t **)malloc((total+1) * sizeof(property_t *));
        unsigned int idx;
        for (idx = 0; idx < total; idx++) {
            array[idx] = property_list_nth(plist, idx);
        }
        array[total] = NULL;
    }

    if (outCount) *outCount = total;
    return array;
}
4015
objc_property_t *protocol_copyPropertyList(Protocol *proto, unsigned int *outCount)
{
    if (!proto) {
        if (outCount) *outCount = 0;
        return NULL;
    }

    rwlock_read(&runtimeLock);
    // Only required instance properties are stored on protocol_t.
    property_t **result = 
        copyPropertyList(newprotocol(proto)->instanceProperties, outCount);
    rwlock_unlock_read(&runtimeLock);

    return (objc_property_t *)result;
}
4034
4035
4036 /***********************************************************************
4037 * protocol_copyProtocolList
4038 * Copies this protocol's incorporated protocols.
4039 * Does not copy those protocol's incorporated protocols in turn.
4040 * Locking: acquires runtimeLock
4041 **********************************************************************/
Protocol * __unsafe_unretained * 
protocol_copyProtocolList(Protocol *p, unsigned int *outCount)
{
    protocol_t *proto = newprotocol(p);
    Protocol **result = NULL;

    if (!proto) {
        if (outCount) *outCount = 0;
        return NULL;
    }

    rwlock_read(&runtimeLock);

    unsigned int total = 
        proto->protocols ? (unsigned int)proto->protocols->count : 0;

    if (total > 0) {
        // NULL-terminated heap array; each entry is passed through
        // remapProtocol() before being returned.
        result = (Protocol **)malloc((total+1) * sizeof(Protocol *));
        unsigned int idx;
        for (idx = 0; idx < total; idx++) {
            result[idx] = (Protocol *)remapProtocol(proto->protocols->list[idx]);
        }
        result[total] = NULL;
    }

    rwlock_unlock_read(&runtimeLock);

    if (outCount) *outCount = total;
    return result;
}
4074
4075
4076 /***********************************************************************
4077 * objc_allocateProtocol
4078 * Creates a new protocol. The protocol may not be used until
4079 * objc_registerProtocol() is called.
4080 * Returns NULL if a protocol with the same name already exists.
4081 * Locking: acquires runtimeLock
4082 **********************************************************************/
Protocol *
objc_allocateProtocol(const char *name)
{
    rwlock_write(&runtimeLock);

    // Fail if the name is already taken by a registered protocol.
    if (NXMapGet(protocols(), name)) {
        rwlock_unlock_write(&runtimeLock);
        return NULL;
    }

    protocol_t *newproto = (protocol_t *)_calloc_internal(sizeof(protocol_t), 1);

    // Under-construction protocols carry a placeholder isa until
    // objc_registerProtocol() swaps in the real Protocol class.
    extern class_t OBJC_CLASS_$___IncompleteProtocol;
    newproto->isa = (Class)&OBJC_CLASS_$___IncompleteProtocol;
    newproto->name = _strdup_internal(name);

    // fixme reserve name without installing

    rwlock_unlock_write(&runtimeLock);

    return (Protocol *)newproto;
}
4106
4107
4108 /***********************************************************************
4109 * objc_registerProtocol
4110 * Registers a newly-constructed protocol. The protocol is now
4111 * ready for use and immutable.
4112 * Locking: acquires runtimeLock
4113 **********************************************************************/
void objc_registerProtocol(Protocol *proto_gen) 
{
    protocol_t *proto = newprotocol(proto_gen);

    rwlock_write(&runtimeLock);

    extern class_t OBJC_CLASS_$___IncompleteProtocol;
    Class incompleteCls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
    extern class_t OBJC_CLASS_$_Protocol;
    Class protocolCls = (Class)&OBJC_CLASS_$_Protocol;

    // Reject double registration.
    if (proto->isa == protocolCls) {
        _objc_inform("objc_registerProtocol: protocol '%s' was already "
                     "registered!", proto->name);
        rwlock_unlock_write(&runtimeLock);
        return;
    }
    // Reject protocols that did not come from objc_allocateProtocol().
    if (proto->isa != incompleteCls) {
        _objc_inform("objc_registerProtocol: protocol '%s' was not allocated "
                     "with objc_allocateProtocol!", proto->name);
        rwlock_unlock_write(&runtimeLock);
        return;
    }

    // Mark construction complete and publish under its (copied) name.
    proto->isa = protocolCls;
    NXMapKeyCopyingInsert(protocols(), proto->name, proto);

    rwlock_unlock_write(&runtimeLock);
}
4144
4145
4146 /***********************************************************************
4147 * protocol_addProtocol
4148 * Adds an incorporated protocol to another protocol.
4149 * No method enforcement is performed.
4150 * `proto` must be under construction. `addition` must not.
4151 * Locking: acquires runtimeLock
4152 **********************************************************************/
void 
protocol_addProtocol(Protocol *proto_gen, Protocol *addition_gen) 
{
    protocol_t *proto = newprotocol(proto_gen);
    protocol_t *addition = newprotocol(addition_gen);

    // Placeholder isa used by protocols still under construction.
    extern class_t OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;

    if (!proto_gen) return;
    if (!addition_gen) return;

    rwlock_write(&runtimeLock);

    // The modified protocol must still be under construction.
    if (proto->isa != cls) {
        _objc_inform("protocol_addProtocol: modified protocol '%s' is not "
                     "under construction!", proto->name);
        rwlock_unlock_write(&runtimeLock);
        return;
    }
    // The added protocol must be finished (already registered).
    if (addition->isa == cls) {
        _objc_inform("protocol_addProtocol: added protocol '%s' is still "
                     "under construction!", addition->name);
        rwlock_unlock_write(&runtimeLock);
        return;
    }
    
    // Create, or grow by one entry, the incorporated-protocol list.
    // NOTE(review): assumes protocol_list_size() covers the existing
    // entries, so adding sizeof(list[0]) makes room for exactly one more
    // — confirm against protocol_list_size's definition.
    protocol_list_t *protolist = proto->protocols;
    if (!protolist) {
        protolist = (protocol_list_t *)
            _calloc_internal(1, sizeof(protocol_list_t) 
                             + sizeof(protolist->list[0]));
    } else {
        protolist = (protocol_list_t *)
            _realloc_internal(protolist, protocol_list_size(protolist) 
                              + sizeof(protolist->list[0]));
    }

    // Append and publish the (possibly relocated) list.
    protolist->list[protolist->count++] = (protocol_ref_t)addition;
    proto->protocols = protolist;

    rwlock_unlock_write(&runtimeLock);
}
4196
4197
4198 /***********************************************************************
4199 * protocol_addMethodDescription
4200 * Adds a method to a protocol. The protocol must be under construction.
4201 * Locking: acquires runtimeLock
4202 **********************************************************************/
// Append one method description to *list, creating the list on first use.
// The list grows by exactly one entsize-d slot per call, and *list may be
// relocated by realloc. The IMP is left NULL: protocol methods carry no
// implementation.
static void
_protocol_addMethod(method_list_t **list, SEL name, const char *types)
{
    if (!*list) {
        // First method: method_list_t embeds its first method_t, so a
        // single zeroed struct has room for one entry.
        *list = (method_list_t *)
            _calloc_internal(sizeof(method_list_t), 1);
        (*list)->entsize_NEVER_USE = sizeof((*list)->first);
        // `name` is a SEL, so the list can be marked fixed-up immediately.
        setMethodListFixedUp(*list);
    } else {
        // Grow by one entry: current total size plus one entsize.
        size_t size = method_list_size(*list) + method_list_entsize(*list);
        *list = (method_list_t *)
            _realloc_internal(*list, size);
    }

    // Fill the newly created slot.
    method_t *meth = method_list_nth(*list, (*list)->count++);
    meth->name = name;
    meth->types = _strdup_internal(types ? types : "");
    meth->imp = NULL;
}
4222
void 
protocol_addMethodDescription(Protocol *proto_gen, SEL name, const char *types,
                              BOOL isRequiredMethod, BOOL isInstanceMethod) 
{
    protocol_t *proto = newprotocol(proto_gen);

    extern class_t OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;

    if (!proto_gen) return;

    rwlock_write(&runtimeLock);

    // Only protocols still under construction may be modified.
    if (proto->isa != cls) {
        _objc_inform("protocol_addMethodDescription: protocol '%s' is not "
                     "under construction!", proto->name);
        rwlock_unlock_write(&runtimeLock);
        return;
    }

    // Select the destination list along the required/optional and
    // instance/class axes, then append.
    method_list_t **dest;
    if (isRequiredMethod) {
        dest = isInstanceMethod ? &proto->instanceMethods 
                                : &proto->classMethods;
    } else {
        dest = isInstanceMethod ? &proto->optionalInstanceMethods 
                                : &proto->optionalClassMethods;
    }
    _protocol_addMethod(dest, name, types);

    rwlock_unlock_write(&runtimeLock);
}
4255
4256
4257 /***********************************************************************
4258 * protocol_addProperty
4259 * Adds a property to a protocol. The protocol must be under construction.
4260 * Locking: acquires runtimeLock
4261 **********************************************************************/
// Append one property to *plist, creating the list on first use.
// The attribute array is flattened to a string via
// copyPropertyAttributeString. *plist may be relocated by realloc.
static void 
_protocol_addProperty(property_list_t **plist, const char *name, 
                      const objc_property_attribute_t *attrs, 
                      unsigned int count)
{
    if (!*plist) {
        *plist = (property_list_t *)
            _calloc_internal(sizeof(property_list_t), 1);
        (*plist)->entsize = sizeof(property_t);
    } else {
        // NOTE(review): assumes sizeof(property_list_t) includes one
        // embedded property_t, so header + count*entsize holds count+1
        // entries — confirm against property_list_t's layout.
        *plist = (property_list_t *)
            _realloc_internal(*plist, sizeof(property_list_t) 
                              + (*plist)->count * (*plist)->entsize);
    }

    // Fill the newly created slot; both strings are owned copies.
    property_t *prop = property_list_nth(*plist, (*plist)->count++);
    prop->name = _strdup_internal(name);
    prop->attributes = copyPropertyAttributeString(attrs, count);
}
4281
void 
protocol_addProperty(Protocol *proto_gen, const char *name, 
                     const objc_property_attribute_t *attrs, 
                     unsigned int count,
                     BOOL isRequiredProperty, BOOL isInstanceProperty)
{
    protocol_t *proto = newprotocol(proto_gen);

    extern class_t OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;

    if (!proto  ||  !name) return;

    rwlock_write(&runtimeLock);

    // Only protocols still under construction may be modified.
    if (proto->isa != cls) {
        _objc_inform("protocol_addProperty: protocol '%s' is not "
                     "under construction!", proto->name);
        rwlock_unlock_write(&runtimeLock);
        return;
    }

    // Only required instance properties are currently representable;
    // every other combination is silently ignored.
    if (isRequiredProperty  &&  isInstanceProperty) {
        _protocol_addProperty(&proto->instanceProperties, name, attrs, count);
    }
    //else if (isRequiredProperty  &&  !isInstanceProperty) {
    //    _protocol_addProperty(&proto->classProperties, name, attrs, count);
    //} else if (!isRequiredProperty  &&  isInstanceProperty) {
    //    _protocol_addProperty(&proto->optionalInstanceProperties, name, attrs, count);
    //} else /* !isRequiredProperty && !isInstanceProperty */ {
    //    _protocol_addProperty(&proto->optionalClassProperties, name, attrs, count);
    //}

    rwlock_unlock_write(&runtimeLock);
}
4318
4319
4320 /***********************************************************************
4321 * objc_getClassList
4322 * Returns pointers to all classes.
4323 * This requires all classes be realized, which is regretfully non-lazy.
4324 * Locking: acquires runtimeLock
4325 **********************************************************************/
int 
objc_getClassList(Class *buffer, int bufferLen) 
{
    rwlock_write(&runtimeLock);

    realizeAllClasses();

    NXHashTable *classes = realizedClasses();
    int allCount = NXCountHashTable(classes);

    // With no buffer, just report how many classes exist.
    if (!buffer) {
        rwlock_unlock_write(&runtimeLock);
        return allCount;
    }

    // Fill the caller's buffer, stopping at bufferLen even if more
    // classes exist; the return value is always the full count.
    int filled = 0;
    class_t *cls;
    NXHashState state = NXInitHashState(classes);
    while (filled < bufferLen  &&  
           NXNextHashState(classes, &state, (void **)&cls)) 
    {
        buffer[filled++] = (Class)cls;
    }

    rwlock_unlock_write(&runtimeLock);

    return allCount;
}
4356
4357
4358 /***********************************************************************
4359 * objc_copyClassList
4360 * Returns pointers to all classes.
4361 * This requires all classes be realized, which is regretfully non-lazy.
4362 *
4363 * outCount may be NULL. *outCount is the number of classes returned.
4364 * If the returned array is not NULL, it is NULL-terminated and must be
4365 * freed with free().
4366 * Locking: write-locks runtimeLock
4367 **********************************************************************/
Class *
objc_copyClassList(unsigned int *outCount)
{
    rwlock_write(&runtimeLock);

    realizeAllClasses();

    Class *result = NULL;
    NXHashTable *classes = realizedClasses();
    unsigned int total = NXCountHashTable(classes);

    if (total > 0) {
        // NULL-terminated heap array of every realized class.
        result = (Class *)malloc((1+total) * sizeof(Class));
        unsigned int filled = 0;
        class_t *cls;
        NXHashState state = NXInitHashState(classes);
        while (NXNextHashState(classes, &state, (void **)&cls)) {
            result[filled++] = (Class)cls;
        }
        result[filled] = NULL;
        // Report the number actually enumerated.
        total = filled;
    }

    rwlock_unlock_write(&runtimeLock);

    if (outCount) *outCount = total;
    return result;
}
4395
4396
4397 /***********************************************************************
4398 * objc_copyProtocolList
4399 * Returns pointers to all protocols.
4400 * Locking: read-locks runtimeLock
4401 **********************************************************************/
Protocol * __unsafe_unretained * 
objc_copyProtocolList(unsigned int *outCount) 
{
    rwlock_read(&runtimeLock);

    NXMapTable *protocol_map = protocols();
    unsigned int total = NXCountMapTable(protocol_map);

    if (total == 0) {
        rwlock_unlock_read(&runtimeLock);
        if (outCount) *outCount = 0;
        return NULL;
    }

    // calloc zeroes the extra slot, which doubles as the NULL terminator.
    Protocol **result = (Protocol **)calloc(1 + total, sizeof(Protocol *));

    unsigned int filled = 0;
    const char *name;
    Protocol *proto;
    NXMapState state = NXInitMapState(protocol_map);
    while (NXNextMapState(protocol_map, &state, 
                          (const void **)&name, (const void **)&proto))
    {
        result[filled++] = proto;
    }
    
    result[filled++] = NULL;
    assert(filled == total+1);

    rwlock_unlock_read(&runtimeLock);

    if (outCount) *outCount = total;
    return result;
}
4439
4440
4441 /***********************************************************************
4442 * objc_getProtocol
4443 * Get a protocol by name, or return NULL
4444 * Locking: read-locks runtimeLock
4445 **********************************************************************/
// Look up a registered protocol by name in the global protocol map.
// Returns NULL if no protocol with that name is registered.
Protocol *objc_getProtocol(const char *name)
{
    rwlock_read(&runtimeLock); 
    Protocol *result = (Protocol *)NXMapGet(protocols(), name);
    rwlock_unlock_read(&runtimeLock);
    return result;
}
4453
4454
4455 /***********************************************************************
4456 * class_copyMethodList
4457 * fixme
4458 * Locking: read-locks runtimeLock
4459 **********************************************************************/
Method *
class_copyMethodList(Class cls_gen, unsigned int *outCount)
{
    class_t *cls = newcls(cls_gen);
    unsigned int count = 0;
    Method *result = NULL;

    if (!cls) {
        if (outCount) *outCount = 0;
        return NULL;
    }

    rwlock_read(&runtimeLock);

    assert(isRealized(cls));

    // First pass: total the entries across every attached method list.
    FOREACH_METHOD_LIST(mlist, cls, {
        count += mlist->count;
    });

    if (count > 0) {
        unsigned int m;
        // Allocation is sized before filtering, so it may be larger than
        // the number of methods actually returned.
        result = (Method *)malloc((count + 1) * sizeof(Method));

        m = 0;
        FOREACH_METHOD_LIST(mlist, cls, {
            unsigned int i;
            for (i = 0; i < mlist->count; i++) {
                Method aMethod = (Method)method_list_nth(mlist, i);
                // Selectors reported by ignoreSelector() are excluded
                // and deducted from the reported count.
                if (ignoreSelector(method_getName(aMethod))) {
                    count--;
                    continue;
                }
                result[m++] = aMethod;
            }
        });
        // NULL terminator goes after the m entries actually stored.
        result[m] = NULL;
    }

    rwlock_unlock_read(&runtimeLock);

    if (outCount) *outCount = count;
    return result;
}
4504
4505
4506 /***********************************************************************
4507 * class_copyIvarList
4508 * fixme
4509 * Locking: read-locks runtimeLock
4510 **********************************************************************/
Ivar *
class_copyIvarList(Class cls_gen, unsigned int *outCount)
{
    class_t *cls = newcls(cls_gen);
    Ivar *result = NULL;
    unsigned int filled = 0;

    if (!cls) {
        if (outCount) *outCount = 0;
        return NULL;
    }

    rwlock_read(&runtimeLock);

    assert(isRealized(cls));

    const ivar_list_t *ivars = cls->data()->ro->ivars;
    if (ivars  &&  ivars->count) {
        // Over-allocate for the full ivar count; entries skipped below
        // mean `filled` may end up smaller.
        result = (Ivar *)malloc((ivars->count+1) * sizeof(Ivar));

        unsigned int idx;
        for (idx = 0; idx < ivars->count; idx++) {
            ivar_t *ivar = ivar_list_nth(ivars, idx);
            if (!ivar->offset) continue;  // anonymous bitfield
            result[filled++] = (Ivar)ivar;
        }
        result[filled] = NULL;
    }

    rwlock_unlock_read(&runtimeLock);

    if (outCount) *outCount = filled;
    return result;
}
4545
4546
4547 /***********************************************************************
4548 * class_copyPropertyList. Returns a heap block containing the
4549 * properties declared in the class, or NULL if the class
4550 * declares no properties. Caller must free the block.
4551 * Does not copy any superclass's properties.
4552 * Locking: read-locks runtimeLock
4553 **********************************************************************/
objc_property_t *
class_copyPropertyList(Class cls_gen, unsigned int *outCount)
{
    class_t *cls = newcls(cls_gen);
    chained_property_list *chain;
    unsigned int total = 0;
    property_t **result = NULL;

    if (!cls) {
        if (outCount) *outCount = 0;
        return NULL;
    }

    rwlock_read(&runtimeLock);

    assert(isRealized(cls));

    // First pass: total the properties across the chained lists.
    for (chain = cls->data()->properties; chain; chain = chain->next) {
        total += chain->count;
    }

    if (total > 0) {
        result = (property_t **)malloc((total + 1) * sizeof(property_t *));

        // Second pass: collect pointers into the chained lists' storage.
        unsigned int filled = 0;
        for (chain = cls->data()->properties; chain; chain = chain->next) {
            unsigned int idx;
            for (idx = 0; idx < chain->count; idx++) {
                result[filled++] = &chain->list[idx];
            }
        }
        result[filled] = NULL;
    }

    rwlock_unlock_read(&runtimeLock);

    if (outCount) *outCount = total;
    return (objc_property_t *)result;
}
4594
4595
4596 /***********************************************************************
4597 * _class_getLoadMethod
4598 * fixme
4599 * Called only from add_class_to_loadable_list.
4600 * Locking: runtimeLock must be read- or write-locked by the caller.
4601 **********************************************************************/
IMP 
_class_getLoadMethod(Class cls_gen)
{
    rwlock_assert_locked(&runtimeLock);

    class_t *cls = newcls(cls_gen);

    assert(isRealized(cls));
    assert(isRealized(cls->isa));
    assert(!isMetaClass(cls));
    assert(isMetaClass(cls->isa));

    // Scan only the metaclass's base method list for +load.
    const method_list_t *mlist = cls->isa->data()->ro->baseMethods;
    if (mlist) {
        uint32_t idx;
        for (idx = 0; idx < mlist->count; idx++) {
            method_t *m = method_list_nth(mlist, idx);
            // Selector names are compared as C strings here, matching
            // the original behavior for un-fixed-up lists.
            if (0 == strcmp((const char *)m->name, "load")) {
                return m->imp;
            }
        }
    }

    return NULL;
}
4626
4627
4628 /***********************************************************************
4629 * _category_getName
4630 * Returns a category's name.
4631 * Locking: none
4632 **********************************************************************/
const char *
_category_getName(Category cat)
{
    // The name is stored directly in the category's metadata.
    category_t *c = newcategory(cat);
    return c->name;
}
4638
4639
4640 /***********************************************************************
4641 * _category_getClassName
4642 * Returns a category's class's name
4643 * Called only from add_category_to_loadable_list and
4644 * remove_category_from_loadable_list.
4645 * Locking: runtimeLock must be read- or write-locked by the caller
4646 **********************************************************************/
const char *
_category_getClassName(Category cat)
{
    rwlock_assert_locked(&runtimeLock);
    // Remap first in case the category's class was replaced.
    class_t *cls = remapClass(newcategory(cat)->cls);
    return getName(cls);
}
4653
4654
4655 /***********************************************************************
4656 * _category_getClass
4657 * Returns a category's class
4658 * Called only by call_category_loads.
4659 * Locking: read-locks runtimeLock
4660 **********************************************************************/
Class 
_category_getClass(Category cat)
{
    rwlock_read(&runtimeLock);
    class_t *cls = remapClass(newcategory(cat)->cls);
    assert(isRealized(cls));  // ok for call_category_loads' usage
    rwlock_unlock_read(&runtimeLock);
    return (Class)cls;
}
4670
4671
4672 /***********************************************************************
4673 * _category_getLoadMethod
4674 * fixme
4675 * Called only from add_category_to_loadable_list
4676 * Locking: runtimeLock must be read- or write-locked by the caller
4677 **********************************************************************/
IMP 
_category_getLoadMethod(Category cat)
{
    rwlock_assert_locked(&runtimeLock);

    // +load for a category lives in its class-method list.
    const method_list_t *mlist = newcategory(cat)->classMethods;
    if (mlist) {
        uint32_t idx;
        for (idx = 0; idx < mlist->count; idx++) {
            method_t *m = method_list_nth(mlist, idx);
            // Selector names are compared as C strings here, matching
            // the original behavior for un-fixed-up lists.
            if (0 == strcmp((const char *)m->name, "load")) {
                return m->imp;
            }
        }
    }

    return NULL;
}
4696
4697
4698 /***********************************************************************
4699 * class_copyProtocolList
4700 * fixme
4701 * Locking: read-locks runtimeLock
4702 **********************************************************************/
Protocol * __unsafe_unretained * 
class_copyProtocolList(Class cls_gen, unsigned int *outCount)
{
    class_t *cls = newcls(cls_gen);
    const protocol_list_t **plist;
    unsigned int total = 0;
    Protocol **result = NULL;

    if (!cls) {
        if (outCount) *outCount = 0;
        return NULL;
    }

    rwlock_read(&runtimeLock);

    assert(isRealized(cls));

    // First pass: total the entries across all attached protocol lists.
    for (plist = cls->data()->protocols; plist  &&  *plist; plist++) {
        total += (uint32_t)(*plist)->count;
    }

    if (total) {
        result = (Protocol **)malloc((total+1) * sizeof(Protocol *));
        // Second pass: copy out each protocol, remapped.
        Protocol **cursor = result;
        for (plist = cls->data()->protocols; plist  &&  *plist; plist++) {
            unsigned int idx;
            for (idx = 0; idx < (*plist)->count; idx++) {
                *cursor++ = (Protocol *)remapProtocol((*plist)->list[idx]);
            }
        }
        *cursor = NULL;
    }

    rwlock_unlock_read(&runtimeLock);

    if (outCount) *outCount = total;
    return result;
}
4742
4743
4744 /***********************************************************************
4745 * _objc_copyClassNamesForImage
4746 * fixme
4747 * Locking: read-locks runtimeLock
4748 **********************************************************************/
const char **
_objc_copyClassNamesForImage(header_info *hi, unsigned int *outCount)
{
    size_t total, idx, kept;
    classref_t *classlist;
    const char **names;

    rwlock_read(&runtimeLock);

    classlist = _getObjc2ClassList(hi, &total);
    names = (const char **)malloc((total+1) * sizeof(const char *));

    // Compact while copying: classes that remap to NULL (ignored
    // weak-linked classes) are dropped, so `kept` <= `total`.
    kept = 0;
    for (idx = 0; idx < total; idx++) {
        class_t *cls = remapClass(classlist[idx]);
        if (cls) {
            names[kept++] = getName(cls);
        }
    }
    names[kept] = NULL;

    rwlock_unlock_read(&runtimeLock);

    if (outCount) *outCount = (unsigned int)kept;
    return names;
}
4778
4779
4780 /***********************************************************************
4781 * _class_getCache
4782 * fixme
4783 * Locking: none
4784 **********************************************************************/
Cache 
_class_getCache(Class cls)
{
    // Direct, unlocked read of the class's method cache pointer.
    class_t *c = newcls(cls);
    return c->cache;
}
4790
4791
4792 /***********************************************************************
4793 * _class_getInstanceSize
4794 * Uses alignedInstanceSize() to ensure that
4795 * obj + class_getInstanceSize(obj->isa) == object_getIndexedIvars(obj)
4796 * Locking: none
4797 **********************************************************************/
size_t 
_class_getInstanceSize(Class cls)
{
    // nil class has size 0; otherwise return the word-aligned size.
    return cls ? alignedInstanceSize(newcls(cls)) : 0;
}
4804
static uint32_t
unalignedInstanceSize(class_t *cls)
{
    assert(cls);
    assert(isRealized(cls));
    // Raw instanceSize straight from the class's read-only data.
    uint32_t rawSize = (uint32_t)cls->data()->ro->instanceSize;
    return rawSize;
}
4812
static uint32_t
alignedInstanceSize(class_t *cls)
{
    assert(cls);
    assert(isRealized(cls));
    // fixme rdar://5278267
    // Round the raw size up to the next word boundary.
    uint32_t raw = unalignedInstanceSize(cls);
    return (uint32_t)((raw + WORD_MASK) & ~WORD_MASK);
}
4821
4822 /***********************************************************************
4823 * _class_getInstanceStart
4824 * Uses alignedInstanceStart() to ensure that ARR layout strings are
4825 * interpreted relative to the first word aligned ivar of an object.
4826 * Locking: none
4827 **********************************************************************/
4828
static uint32_t
alignedInstanceStart(class_t *cls)
{
    assert(cls);
    assert(isRealized(cls));
    // Round instanceStart up to the next word boundary.
    uint32_t start = (uint32_t)cls->data()->ro->instanceStart;
    return (uint32_t)((start + WORD_MASK) & ~WORD_MASK);
}
4836
uint32_t _class_getInstanceStart(Class cls_gen) {
    // Public wrapper: word-aligned offset of the first ivar.
    return alignedInstanceStart(newcls(cls_gen));
}
4841
4842
4843 /***********************************************************************
4844 * class_getVersion
4845 * fixme
4846 * Locking: none
4847 **********************************************************************/
int 
class_getVersion(Class cls)
{
    // nil class reports version 0.
    if (!cls) return 0;
    class_t *c = newcls(cls);
    assert(isRealized(c));
    return c->data()->version;
}
4855
4856
4857 /***********************************************************************
4858 * _class_setCache
4859 * fixme
4860 * Locking: none
4861 **********************************************************************/
void 
_class_setCache(Class cls, Cache cache)
{
    // Direct, unlocked store of the class's method cache pointer.
    class_t *c = newcls(cls);
    c->cache = cache;
}
4867
4868
4869 /***********************************************************************
4870 * class_setVersion
4871 * fixme
4872 * Locking: none
4873 **********************************************************************/
void 
class_setVersion(Class cls, int version)
{
    // Setting the version of a nil class is a no-op.
    if (!cls) return;
    class_t *c = newcls(cls);
    assert(isRealized(c));
    c->data()->version = version;
}
4881
4882
4883 /***********************************************************************
4884 * _class_getName
4885 * fixme
4886 * Locking: acquires runtimeLock
4887 **********************************************************************/
const char *_class_getName(Class cls)
{
    // nil maps to the literal string "nil".
    if (!cls) return "nil";
    // fixme hack rwlock_write(&runtimeLock);
    const char *result = getName(newcls(cls));
    // rwlock_unlock_write(&runtimeLock);
    return result;
}
4896
4897
4898 /***********************************************************************
4899 * getName
4900 * fixme
4901 * Locking: runtimeLock must be held by the caller
4902 **********************************************************************/
static const char *
getName(class_t *cls)
{
    // fixme hack rwlock_assert_writing(&runtimeLock);
    assert(cls);

    // For an unrealized class, data() still points at the read-only
    // struct, hence the cast in the second branch.
    return isRealized(cls) 
        ? cls->data()->ro->name 
        : ((const class_ro_t *)cls->data())->name;
}
4915
// Binary search of a method list sorted by selector address.
// Returns the FIRST entry whose name equals `key`, or NULL if absent.
static method_t *findMethodInSortedMethodList(SEL key, const method_list_t *list)
{
    const method_t * const first = &list->first;
    const method_t *base = first;
    const method_t *probe;
    uintptr_t keyValue = (uintptr_t)key;
    uint32_t count;
    
    // Binary search over [base, base+count); `count` halves each pass.
    for (count = list->count; count != 0; count >>= 1) {
        probe = base + (count >> 1);
        
        uintptr_t probeValue = (uintptr_t)probe->name;
        
        if (keyValue == probeValue) {
            // `probe` is a match.
            // Rewind looking for the *first* occurrence of this value.
            // This is required for correct category overrides.
            while (probe > first && keyValue == (uintptr_t)probe[-1].name) {
                probe--;
            }
            return (method_t *)probe;
        }
        
        if (keyValue > probeValue) {
            // Continue in the upper half. The extra count-- excludes the
            // probe element itself so the following >>= 1 yields the
            // size of the remaining upper range.
            base = probe + 1;
            count--;
        }
    }
    
    return NULL;
}
4947
4948 /***********************************************************************
4949 * getMethodNoSuper_nolock
4950 * fixme
4951 * Locking: runtimeLock must be read- or write-locked by the caller
4952 **********************************************************************/
// Find `sel` in a single method list, or return NULL.
// Fixed-up lists with the standard entry size are searched with the
// sorted binary search; everything else falls back to a linear scan.
static method_t *search_method_list(const method_list_t *mlist, SEL sel)
{
    int methodListIsFixedUp = isMethodListFixedUp(mlist);
    int methodListHasExpectedSize = mlist->getEntsize() == sizeof(method_t);
    
    if (__builtin_expect(methodListIsFixedUp && methodListHasExpectedSize, 1)) {
        return findMethodInSortedMethodList(sel, mlist);
    } else {
        // Linear search of unsorted method list
        method_list_t::method_iterator iter = mlist->begin();
        method_list_t::method_iterator end = mlist->end();
        for ( ; iter != end; ++iter) {
            if (iter->name == sel) return &*iter;
        }
    }

#ifndef NDEBUG
    // sanity-check negative results
    // If a fixed-up list failed binary search but a linear scan finds
    // the selector, the list's sort order is broken — fatal.
    if (isMethodListFixedUp(mlist)) {
        method_list_t::method_iterator iter = mlist->begin();
        method_list_t::method_iterator end = mlist->end();
        for ( ; iter != end; ++iter) {
            if (iter->name == sel) {
                _objc_fatal("linear search worked when binary search did not");
            }
        }
    }
#endif

    return NULL;
}
4984
// Search only this class's own method lists (no superclass traversal)
// for `sel`. The first list containing the selector wins.
// Caller must hold runtimeLock.
static method_t *
getMethodNoSuper_nolock(class_t *cls, SEL sel)
{
    rwlock_assert_locked(&runtimeLock);

    assert(isRealized(cls));
    // fixme nil cls? 
    // fixme NULL sel?

    FOREACH_METHOD_LIST(mlist, cls, {
        method_t *m = search_method_list(mlist, sel);
        if (m) return m;
    });

    return NULL;
}
5001
5002
5003 /***********************************************************************
5004 * _class_getMethodNoSuper
5005 * fixme
5006 * Locking: read-locks runtimeLock
5007 **********************************************************************/
Method 
_class_getMethodNoSuper(Class cls, SEL sel)
{
    rwlock_read(&runtimeLock);
    // Look only in this class; superclasses are not consulted.
    method_t *m = getMethodNoSuper_nolock(newcls(cls), sel);
    rwlock_unlock_read(&runtimeLock);
    return (Method)m;
}
5016
5017 /***********************************************************************
5018 * _class_getMethodNoSuper
5019 * For use inside lockForMethodLookup() only.
5020 * Locking: read-locks runtimeLock
5021 **********************************************************************/
Method 
_class_getMethodNoSuper_nolock(Class cls, SEL sel)
{
    // Caller already holds runtimeLock (for use inside
    // lockForMethodLookup() only).
    method_t *m = getMethodNoSuper_nolock(newcls(cls), sel);
    return (Method)m;
}
5027
5028
5029 /***********************************************************************
5030 * getMethod_nolock
5031 * fixme
5032 * Locking: runtimeLock must be read- or write-locked by the caller
5033 **********************************************************************/
5034 static method_t *
5035 getMethod_nolock(class_t *cls, SEL sel)
5036 {
5037 method_t *m = NULL;
5038
5039 rwlock_assert_locked(&runtimeLock);
5040
5041 // fixme nil cls?
5042 // fixme NULL sel?
5043
5044 assert(isRealized(cls));
5045
5046 while (cls && ((m = getMethodNoSuper_nolock(cls, sel))) == NULL) {
5047 cls = getSuperclass(cls);
5048 }
5049
5050 return m;
5051 }
5052
5053
5054 /***********************************************************************
5055 * _class_getMethod
5056 * fixme
5057 * Locking: read-locks runtimeLock
5058 **********************************************************************/
5059 Method _class_getMethod(Class cls, SEL sel)
5060 {
5061 Method m;
5062 rwlock_read(&runtimeLock);
5063 m = (Method)getMethod_nolock(newcls(cls), sel);
5064 rwlock_unlock_read(&runtimeLock);
5065 return m;
5066 }
5067
5068
/***********************************************************************
* ABI-specific lookUpMethod helpers.
* lockForMethodLookup / unlockForMethodLookup bracket a method lookup
* with runtimeLock held for reading.
* Locking: read- and write-locks runtimeLock.
**********************************************************************/
void lockForMethodLookup(void)
{
    rwlock_read(&runtimeLock);
}
void unlockForMethodLookup(void)
{
    rwlock_unlock_read(&runtimeLock);
}
5081
/***********************************************************************
* prepareForMethodLookup
* Realizes cls if needed, and optionally runs +initialize, before a
* method lookup proceeds. Always returns NULL (no cached IMP shortcut).
* Must be entered with runtimeLock unlocked.
**********************************************************************/
IMP prepareForMethodLookup(Class cls, SEL sel, BOOL init, id obj)
{
    rwlock_assert_unlocked(&runtimeLock);

    // Realize the class under the write lock on first use.
    if (!isRealized(newcls(cls))) {
        rwlock_write(&runtimeLock);
        realizeClass(newcls(cls));
        rwlock_unlock_write(&runtimeLock);
    }

    // Trigger +initialize if requested; resolve to the non-meta class
    // for obj so class methods initialize the right class.
    if (init && !_class_isInitialized(cls)) {
        _class_initialize (_class_getNonMetaClass(cls, obj));
        // If sel == initialize, _class_initialize will send +initialize and
        // then the messenger will send +initialize again after this
        // procedure finishes. Of course, if this is not being called
        // from the messenger then it won't happen. 2778172
    }

    return NULL;
}
5102
5103
5104 /***********************************************************************
5105 * class_getProperty
5106 * fixme
5107 * Locking: read-locks runtimeLock
5108 **********************************************************************/
5109 objc_property_t class_getProperty(Class cls_gen, const char *name)
5110 {
5111 property_t *result = NULL;
5112 chained_property_list *plist;
5113 class_t *cls = newcls(cls_gen);
5114
5115 if (!cls || !name) return NULL;
5116
5117 rwlock_read(&runtimeLock);
5118
5119 assert(isRealized(cls));
5120
5121 for ( ; cls; cls = getSuperclass(cls)) {
5122 for (plist = cls->data()->properties; plist; plist = plist->next) {
5123 uint32_t i;
5124 for (i = 0; i < plist->count; i++) {
5125 if (0 == strcmp(name, plist->list[i].name)) {
5126 result = &plist->list[i];
5127 goto done;
5128 }
5129 }
5130 }
5131 }
5132
5133 done:
5134 rwlock_unlock_read(&runtimeLock);
5135
5136 return (objc_property_t)result;
5137 }
5138
5139
5140 /***********************************************************************
5141 * Locking: fixme
5142 **********************************************************************/
5143 BOOL _class_isMetaClass(Class cls)
5144 {
5145 if (!cls) return NO;
5146 return isMetaClass(newcls(cls));
5147 }
5148
// Returns YES if cls is a metaclass (RO_META set in its ro flags).
// cls must be non-nil and realized.
static BOOL
isMetaClass(class_t *cls)
{
    assert(cls);
    assert(isRealized(cls));
    return (cls->data()->ro->flags & RO_META) ? YES : NO;
}
5156
5157 class_t *getMeta(class_t *cls)
5158 {
5159 if (isMetaClass(cls)) return cls;
5160 else return cls->isa;
5161 }
5162
// Public wrapper around getMeta(): returns cls itself if it is already
// a metaclass, otherwise cls's metaclass.
Class _class_getMeta(Class cls)
{
    return (Class)getMeta(newcls(cls));
}
5167
5168 Class gdb_class_getClass(Class cls)
5169 {
5170 const char *className = getName(newcls(cls));
5171 if(!className || !strlen(className)) return Nil;
5172 Class rCls = look_up_class(className, NO, NO);
5173 return rCls;
5174 }
5175
5176 Class gdb_object_getClass(id obj)
5177 {
5178 Class cls = _object_getClass(obj);
5179 return gdb_class_getClass(cls);
5180 }
5181
5182 BOOL gdb_objc_isRuntimeLocked()
5183 {
5184 if (rwlock_try_write(&runtimeLock)) {
5185 rwlock_unlock_write(&runtimeLock);
5186 } else
5187 return YES;
5188
5189 if (mutex_try_lock(&cacheUpdateLock)) {
5190 mutex_unlock(&cacheUpdateLock);
5191 } else
5192 return YES;
5193
5194 return NO;
5195 }
5196
5197 /***********************************************************************
5198 * Locking: fixme
5199 **********************************************************************/
5200 BOOL
5201 _class_isInitializing(Class cls_gen)
5202 {
5203 class_t *cls = newcls(_class_getMeta(cls_gen));
5204 return (cls->data()->flags & RW_INITIALIZING) ? YES : NO;
5205 }
5206
5207
5208 /***********************************************************************
5209 * Locking: fixme
5210 **********************************************************************/
5211 BOOL
5212 _class_isInitialized(Class cls_gen)
5213 {
5214 class_t *cls = newcls(_class_getMeta(cls_gen));
5215 return (cls->data()->flags & RW_INITIALIZED) ? YES : NO;
5216 }
5217
5218
/***********************************************************************
* _class_setInitializing
* Marks cls_gen's metaclass as currently running +initialize
* (sets RW_INITIALIZING via changeInfo).
* cls_gen must not itself be a metaclass.
* Locking: fixme
**********************************************************************/
void
_class_setInitializing(Class cls_gen)
{
    assert(!_class_isMetaClass(cls_gen));
    class_t *cls = newcls(_class_getMeta(cls_gen));
    changeInfo(cls, RW_INITIALIZING, 0);
}
5229
5230
/***********************************************************************
* _class_setInitialized
* Marks cls_gen as +initialize-complete: rebuilds the vtables that were
* deliberately left un-optimized pending +initialize, then flips the
* metaclass flags from INITIALIZING to INITIALIZED.
* Locking: write-locks runtimeLock
**********************************************************************/
void
_class_setInitialized(Class cls_gen)
{
    class_t *metacls;
    class_t *cls;

    rwlock_write(&runtimeLock);

    assert(!_class_isMetaClass(cls_gen));

    cls = newcls(cls_gen);
    metacls = getMeta(cls);

    // Update vtables (initially postponed pending +initialize completion)
    // Do cls first because root metacls is a subclass of root cls
    updateVtable(cls, YES);
    updateVtable(metacls, YES);

    rwlock_unlock_write(&runtimeLock);

    // NOTE(review): the flag flip happens after runtimeLock is dropped —
    // presumably changeInfo provides its own synchronization; confirm.
    changeInfo(metacls, RW_INITIALIZED, RW_INITIALIZING);
}
5256
5257
/***********************************************************************
* _class_shouldGrowCache
* Policy hook: whether cls's method cache may grow. Currently always
* YES for every class; cls is ignored.
* Locking: fixme
**********************************************************************/
BOOL
_class_shouldGrowCache(Class cls)
{
    return YES; // fixme good or bad for memory use?
}
5266
5267
/***********************************************************************
* _class_setGrowCache
* Policy hook: intentionally a no-op in this ABI — cache-growth policy
* is not per-class here (see _class_shouldGrowCache).
* Locking: fixme
**********************************************************************/
void
_class_setGrowCache(Class cls, BOOL grow)
{
    // fixme good or bad for memory use?
}
5276
5277
/***********************************************************************
* _class_isLoadable
* Always YES in this ABI; only asserts that cls is realized.
* Locking: none
**********************************************************************/
BOOL
_class_isLoadable(Class cls)
{
    assert(isRealized(newcls(cls)));
    return YES; // any class registered for +load is definitely loadable
}
5289
5290
/***********************************************************************
* hasCxxStructors
* Returns YES if the RW_HAS_CXX_STRUCTORS flag is set on cls.
* Locking: fixme
**********************************************************************/
static BOOL
hasCxxStructors(class_t *cls)
{
    // this DOES check superclasses too, because addSubclass()
    // propagates the flag from the superclass.
    assert(isRealized(cls));
    return (cls->data()->flags & RW_HAS_CXX_STRUCTORS) ? YES : NO;
}
5302
// Public wrapper around hasCxxStructors().
BOOL
_class_hasCxxStructors(Class cls)
{
    return hasCxxStructors(newcls(cls));
}
5308
5309
5310 /***********************************************************************
5311 * Locking: fixme
5312 **********************************************************************/
5313 BOOL
5314 _class_shouldFinalizeOnMainThread(Class cls)
5315 {
5316 assert(isRealized(newcls(cls)));
5317 return (newcls(cls)->data()->flags & RW_FINALIZE_ON_MAIN_THREAD) ? YES : NO;
5318 }
5319
5320
/***********************************************************************
* _class_setFinalizeOnMainThread
* Sets RW_FINALIZE_ON_MAIN_THREAD on cls via changeInfo.
* cls must be realized.
* Locking: fixme
**********************************************************************/
void
_class_setFinalizeOnMainThread(Class cls)
{
    assert(isRealized(newcls(cls)));
    changeInfo(newcls(cls), RW_FINALIZE_ON_MAIN_THREAD, 0);
}
5330
5331
5332 /***********************************************************************
5333 * _class_instancesHaveAssociatedObjects
5334 * May manipulate unrealized future classes in the CF-bridged case.
5335 **********************************************************************/
5336 BOOL
5337 _class_instancesHaveAssociatedObjects(Class cls_gen)
5338 {
5339 class_t *cls = newcls(cls_gen);
5340 assert(isFuture(cls) || isRealized(cls));
5341 return (cls->data()->flags & RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS) ? YES : NO;
5342 }
5343
5344
/***********************************************************************
* _class_setInstancesHaveAssociatedObjects
* Sets RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS on cls via changeInfo.
* May manipulate unrealized future classes in the CF-bridged case.
**********************************************************************/
void
_class_setInstancesHaveAssociatedObjects(Class cls_gen)
{
    class_t *cls = newcls(cls_gen);
    assert(isFuture(cls) || isRealized(cls));
    changeInfo(cls, RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS, 0);
}
5356
5357
5358 /***********************************************************************
5359 * _class_usesAutomaticRetainRelease
5360 * Returns YES if class was compiled with -fobjc-arc
5361 **********************************************************************/
5362 BOOL _class_usesAutomaticRetainRelease(Class cls_gen)
5363 {
5364 class_t *cls = newcls(cls_gen);
5365 return (cls->data()->ro->flags & RO_IS_ARR) ? YES : NO;
5366 }
5367
5368
5369 /***********************************************************************
5370 * Return YES if sel is used by retain/release implementors
5371 **********************************************************************/
5372 static bool isRRSelector(SEL sel)
5373 {
5374 return (sel == SEL_retain || sel == SEL_release ||
5375 sel == SEL_autorelease || sel == SEL_retainCount);
5376 }
5377
5378
/***********************************************************************
* isAWZSelector
* Return YES if sel is used by allocWithZone implementors
* (currently only allocWithZone: itself).
**********************************************************************/
static bool isAWZSelector(SEL sel)
{
    return (sel == SEL_allocWithZone);
}
5386
5387
/***********************************************************************
* class_t::setHasCustomRR
* Mark this class and all of its subclasses as implementors or
* inheritors of custom RR (retain/release/autorelease/retainCount).
* No-op if the bit is already set on this class.
* Locking: runtimeLock must be write-locked (asserted).
**********************************************************************/
void class_t::setHasCustomRR(bool inherited)
{
    rwlock_assert_writing(&runtimeLock);

    // Already marked: nothing to do.
    if (hasCustomRR()) return;

    FOREACH_REALIZED_CLASS_AND_SUBCLASS(c, this, {
        if (PrintCustomRR && !c->hasCustomRR()) {
            _objc_inform("CUSTOM RR: %s%s%s", getName(c),
                         isMetaClass(c) ? " (meta)" : "",
                         (inherited || c != this) ? " (inherited)" : "");
        }
#if CLASS_FAST_FLAGS_VIA_RW_DATA
        // Fast path: the custom-RR bit lives in the low bits of the
        // data pointer itself.
        c->data_NEVER_USE |= (uintptr_t)1;
#else
        c->data()->flags |= RW_HAS_CUSTOM_RR;
#endif
    });
}
5411
5412
/***********************************************************************
* class_t::setHasCustomAWZ
* Mark this class and all of its subclasses as implementors or
* inheritors of custom allocWithZone:.
* No-op if the bit is already set on this class.
* Locking: runtimeLock must be write-locked (asserted).
**********************************************************************/
void class_t::setHasCustomAWZ(bool inherited )
{
    rwlock_assert_writing(&runtimeLock);

    // Already marked: nothing to do.
    if (hasCustomAWZ()) return;

    FOREACH_REALIZED_CLASS_AND_SUBCLASS(c, this, {
        if (PrintCustomAWZ && !c->hasCustomAWZ()) {
            _objc_inform("CUSTOM AWZ: %s%s%s", getName(c),
                         isMetaClass(c) ? " (meta)" : "",
                         (inherited || c != this) ? " (inherited)" : "");
        }
#if CLASS_FAST_FLAGS_VIA_RW_DATA
        // Fast path: the custom-AWZ bit lives in the low bits of the
        // data pointer itself.
        c->data_NEVER_USE |= (uintptr_t)2;
#else
        c->data()->flags |= RW_HAS_CUSTOM_AWZ;
#endif
    });
}
5436
5437
5438 /***********************************************************************
5439 * Update custom RR and AWZ when a method changes its IMP
5440 **********************************************************************/
5441 static void
5442 updateCustomRR_AWZ(class_t *cls, method_t *meth)
5443 {
5444 // In almost all cases, IMP swizzling does not affect custom RR/AWZ bits.
5445 // The class is already marked for custom RR/AWZ, so changing the IMP
5446 // does not transition from non-custom to custom.
5447 //
5448 // The only cases where IMP swizzling can affect the RR/AWZ bits is
5449 // if the swizzled method is one of the methods that is assumed to be
5450 // non-custom. These special cases come from attachMethodLists().
5451 // We look for such cases here if we do not know the affected class.
5452
5453 if (isRRSelector(meth->name)) {
5454 if (cls) {
5455 cls->setHasCustomRR();
5456 } else {
5457 // Don't know the class.
5458 // The only special case is class NSObject.
5459 FOREACH_METHOD_LIST(mlist, classNSObject(), {
5460 for (uint32_t i = 0; i < mlist->count; i++) {
5461 if (meth == method_list_nth(mlist, i)) {
5462 // Yep, they're swizzling NSObject.
5463 classNSObject()->setHasCustomRR();
5464 return;
5465 }
5466 }
5467 });
5468 }
5469 }
5470 else if (isAWZSelector(meth->name)) {
5471 if (cls) {
5472 cls->setHasCustomAWZ();
5473 } else {
5474 // Don't know the class.
5475 // The only special case is metaclass NSObject.
5476 FOREACH_METHOD_LIST(mlist, classNSObject()->isa, {
5477 for (uint32_t i = 0; i < mlist->count; i++) {
5478 if (meth == method_list_nth(mlist, i)) {
5479 // Yep, they're swizzling metaclass NSObject.
5480 classNSObject()->isa->setHasCustomRR();
5481 return;
5482 }
5483 }
5484 });
5485 }
5486 }
5487 }
5488
/***********************************************************************
* _class_getSuperclass
* Public wrapper around getSuperclass().
* Locking: none
* fixme assert realized to get superclass remapping?
**********************************************************************/
Class
_class_getSuperclass(Class cls)
{
    return (Class)getSuperclass(newcls(cls));
}
5498
5499 static class_t *
5500 getSuperclass(class_t *cls)
5501 {
5502 if (!cls) return NULL;
5503 return cls->superclass;
5504 }
5505
5506
5507 /***********************************************************************
5508 * class_getIvarLayout
5509 * Called by the garbage collector.
5510 * The class must be NULL or already realized.
5511 * Locking: none
5512 **********************************************************************/
5513 const uint8_t *
5514 class_getIvarLayout(Class cls_gen)
5515 {
5516 class_t *cls = newcls(cls_gen);
5517 if (cls) return cls->data()->ro->ivarLayout;
5518 else return NULL;
5519 }
5520
5521
5522 /***********************************************************************
5523 * class_getWeakIvarLayout
5524 * Called by the garbage collector.
5525 * The class must be NULL or already realized.
5526 * Locking: none
5527 **********************************************************************/
5528 const uint8_t *
5529 class_getWeakIvarLayout(Class cls_gen)
5530 {
5531 class_t *cls = newcls(cls_gen);
5532 if (cls) return cls->data()->ro->weakIvarLayout;
5533 else return NULL;
5534 }
5535
5536
/***********************************************************************
* class_setIvarLayout
* Changes the class's GC scan layout.
* NULL layout means no unscanned ivars
* The class must be under construction (RW_CONSTRUCTING).
* fixme: sanity-check layout vs instance size?
* fixme: sanity-check layout vs superclass?
* Locking: acquires runtimeLock
**********************************************************************/
void
class_setIvarLayout(Class cls_gen, const uint8_t *layout)
{
    class_t *cls = newcls(cls_gen);
    if (!cls) return;

    rwlock_write(&runtimeLock);

    // Can only change layout of in-construction classes.
    // note: if modifications to post-construction classes were
    // allowed, there would be a race below (us vs. concurrent GC scan)
    if (!(cls->data()->flags & RW_CONSTRUCTING)) {
        _objc_inform("*** Can't set ivar layout for already-registered "
                     "class '%s'", getName(cls));
        rwlock_unlock_write(&runtimeLock);
        return;
    }

    class_ro_t *ro_w = make_ro_writeable(cls->data());

    // Free any previous layout and store a private copy of the new one.
    try_free(ro_w->ivarLayout);
    ro_w->ivarLayout = _ustrdup_internal(layout);

    rwlock_unlock_write(&runtimeLock);
}
5571
// SPI: Instance-specific object layout.

// Installs a per-instance ivar-layout accessor function on cls_gen.
// The function pointer is stored in ro->ivarLayout, and
// RW_HAS_INSTANCE_SPECIFIC_LAYOUT tells readers to call it rather
// than treat the field as layout bytes (see _object_getIvarLayout).
// Locking: write-locks runtimeLock.
void
_class_setIvarLayoutAccessor(Class cls_gen, const uint8_t* (*accessor) (id object)) {
    class_t *cls = newcls(cls_gen);
    if (!cls) return;

    rwlock_write(&runtimeLock);

    class_ro_t *ro_w = make_ro_writeable(cls->data());

    // Only free the old value if it was layout bytes; if an accessor was
    // already installed, the field holds a function pointer, not a heap copy.
    // FIXME: this really isn't safe to free if there are instances of this class already.
    if (!(cls->data()->flags & RW_HAS_INSTANCE_SPECIFIC_LAYOUT)) try_free(ro_w->ivarLayout);
    ro_w->ivarLayout = (uint8_t *)accessor;
    changeInfo(cls, RW_HAS_INSTANCE_SPECIFIC_LAYOUT, 0);

    rwlock_unlock_write(&runtimeLock);
}
5590
// Returns the ivar layout for object (an instance of cls_gen).
// If RW_HAS_INSTANCE_SPECIFIC_LAYOUT is set, ro->ivarLayout actually
// holds a const uint8_t* (*)(id) accessor (installed by
// _class_setIvarLayoutAccessor) which is invoked with the object.
// Locking: none. NOTE(review): reads flags/ro without runtimeLock —
// presumably safe for registered classes; confirm.
const uint8_t *
_object_getIvarLayout(Class cls_gen, id object) {
    class_t *cls = newcls(cls_gen);
    if (cls) {
        const uint8_t* layout = cls->data()->ro->ivarLayout;
        if (cls->data()->flags & RW_HAS_INSTANCE_SPECIFIC_LAYOUT) {
            // Field is really a function pointer in this case.
            const uint8_t* (*accessor) (id object) = (const uint8_t* (*)(id))layout;
            layout = accessor(object);
        }
        return layout;
    }
    return NULL;
}
5604
/***********************************************************************
* class_setWeakIvarLayout
* Changes the class's GC weak layout.
* NULL layout means no weak ivars
* The class must be under construction (RW_CONSTRUCTING).
* fixme: sanity-check layout vs instance size?
* fixme: sanity-check layout vs superclass?
* Locking: acquires runtimeLock
**********************************************************************/
void
class_setWeakIvarLayout(Class cls_gen, const uint8_t *layout)
{
    class_t *cls = newcls(cls_gen);
    if (!cls) return;

    rwlock_write(&runtimeLock);

    // Can only change layout of in-construction classes.
    // note: if modifications to post-construction classes were
    // allowed, there would be a race below (us vs. concurrent GC scan)
    if (!(cls->data()->flags & RW_CONSTRUCTING)) {
        _objc_inform("*** Can't set weak ivar layout for already-registered "
                     "class '%s'", getName(cls));
        rwlock_unlock_write(&runtimeLock);
        return;
    }

    class_ro_t *ro_w = make_ro_writeable(cls->data());

    // Free any previous layout and store a private copy of the new one.
    try_free(ro_w->weakIvarLayout);
    ro_w->weakIvarLayout = _ustrdup_internal(layout);

    rwlock_unlock_write(&runtimeLock);
}
5639
5640
/***********************************************************************
* _class_getVariable
* Searches cls and its superclasses for an ivar named name.
* On success, if memberOf is non-NULL, *memberOf receives the class
* that actually declares the ivar.
* Locking: read-locks runtimeLock
**********************************************************************/
Ivar
_class_getVariable(Class cls, const char *name, Class *memberOf)
{
    rwlock_read(&runtimeLock);

    for ( ; cls != Nil; cls = class_getSuperclass(cls)) {
        ivar_t *ivar = getIvar(newcls(cls), name);
        if (ivar) {
            // Drop the lock before writing to caller-owned memory.
            rwlock_unlock_read(&runtimeLock);
            if (memberOf) *memberOf = cls;
            return (Ivar)ivar;
        }
    }

    rwlock_unlock_read(&runtimeLock);

    return NULL;
}
5664
5665
/***********************************************************************
* class_conformsToProtocol
* Returns YES if cls adopts proto directly, or adopts a protocol that
* (transitively) conforms to proto.
* Does NOT check superclasses — only cls's own protocol lists.
* Locking: read-locks runtimeLock
**********************************************************************/
BOOL class_conformsToProtocol(Class cls_gen, Protocol *proto_gen)
{
    class_t *cls = newcls(cls_gen);
    protocol_t *proto = newprotocol(proto_gen);
    const protocol_list_t **plist;
    unsigned int i;
    BOOL result = NO;

    if (!cls_gen) return NO;
    if (!proto_gen) return NO;

    rwlock_read(&runtimeLock);

    assert(isRealized(cls));

    // protocols is a NULL-terminated array of protocol_list_t*.
    for (plist = cls->data()->protocols; plist && *plist; plist++) {
        for (i = 0; i < (*plist)->count; i++) {
            // Entries may be unremapped references; resolve first.
            protocol_t *p = remapProtocol((*plist)->list[i]);
            if (p == proto || _protocol_conformsToProtocol_nolock(p, proto)) {
                result = YES;
                goto done;
            }
        }
    }

 done:
    rwlock_unlock_read(&runtimeLock);

    return result;
}
5701
5702
/***********************************************************************
* addMethod
* Adds (or, if replace, replaces) an implementation of name on cls.
* Return value:
*   - method already exists, !replace: the existing IMP (caller treats
*     a non-NULL result as "add failed")
*   - method already exists, replace:  the previous IMP swapped out
*   - method did not exist:            NULL (a new one-entry method
*     list was attached and caches/vtables flushed)
* Locking: runtimeLock must be write-locked by the caller
**********************************************************************/
static IMP
addMethod(class_t *cls, SEL name, IMP imp, const char *types, BOOL replace)
{
    IMP result = NULL;

    rwlock_assert_writing(&runtimeLock);

    assert(types);
    assert(isRealized(cls));

    method_t *m;
    if ((m = getMethodNoSuper_nolock(cls, name))) {
        // already exists
        if (!replace) {
            result = _method_getImplementation(m);
        } else {
            result = _method_setImplementation(cls, m, imp);
        }
    } else {
        // fixme optimize
        // Build a one-entry method list, already marked fixed-up.
        method_list_t *newlist;
        newlist = (method_list_t *)_calloc_internal(sizeof(*newlist), 1);
        newlist->entsize_NEVER_USE = (uint32_t)sizeof(method_t) | fixed_up_method_list;
        newlist->count = 1;
        newlist->first.name = name;
        newlist->first.types = strdup(types);
        // Ignored selectors get the shared no-op implementation.
        if (!ignoreSelector(name)) {
            newlist->first.imp = imp;
        } else {
            newlist->first.imp = (IMP)&_objc_ignored_method;
        }

        BOOL vtablesAffected = NO;
        attachMethodLists(cls, &newlist, 1, NO, NO, &vtablesAffected);
        flushCaches(cls);
        if (vtablesAffected) flushVtables(cls);

        result = NULL;
    }

    return result;
}
5750
5751
5752 BOOL
5753 class_addMethod(Class cls, SEL name, IMP imp, const char *types)
5754 {
5755 if (!cls) return NO;
5756
5757 rwlock_write(&runtimeLock);
5758 IMP old = addMethod(newcls(cls), name, imp, types ?: "", NO);
5759 rwlock_unlock_write(&runtimeLock);
5760 return old ? NO : YES;
5761 }
5762
5763
5764 IMP
5765 class_replaceMethod(Class cls, SEL name, IMP imp, const char *types)
5766 {
5767 if (!cls) return NULL;
5768
5769 rwlock_write(&runtimeLock);
5770 IMP old = addMethod(newcls(cls), name, imp, types ?: "", YES);
5771 rwlock_unlock_write(&runtimeLock);
5772 return old;
5773 }
5774
5775
/***********************************************************************
* class_addIvar
* Adds an ivar to a class.
* alignment is log2 of the byte alignment (see the mask math below).
* Fails (NO) for nil/meta/registered classes, duplicate names, or
* size > UINT32_MAX. An empty name is treated as anonymous (NULL).
* Locking: acquires runtimeLock
**********************************************************************/
BOOL
class_addIvar(Class cls_gen, const char *name, size_t size,
              uint8_t alignment, const char *type)
{
    class_t *cls = newcls(cls_gen);

    if (!cls) return NO;

    if (!type) type = "";
    if (name && 0 == strcmp(name, "")) name = NULL;

    rwlock_write(&runtimeLock);

    assert(isRealized(cls));

    // No class variables
    if (isMetaClass(cls)) {
        rwlock_unlock_write(&runtimeLock);
        return NO;
    }

    // Can only add ivars to in-construction classes.
    if (!(cls->data()->flags & RW_CONSTRUCTING)) {
        rwlock_unlock_write(&runtimeLock);
        return NO;
    }

    // Check for existing ivar with this name, unless it's anonymous.
    // Check for too-big ivar.
    // fixme check for superclass ivar too?
    if ((name && getIvar(cls, name)) || size > UINT32_MAX) {
        rwlock_unlock_write(&runtimeLock);
        return NO;
    }

    class_ro_t *ro_w = make_ro_writeable(cls->data());

    // fixme allocate less memory here

    // Grow the ivar list by one entry (or create a fresh one-slot list).
    ivar_list_t *oldlist, *newlist;
    if ((oldlist = (ivar_list_t *)cls->data()->ro->ivars)) {
        size_t oldsize = ivar_list_size(oldlist);
        newlist = (ivar_list_t *)
            _calloc_internal(oldsize + oldlist->entsize, 1);
        memcpy(newlist, oldlist, oldsize);
        _free_internal(oldlist);
    } else {
        newlist = (ivar_list_t *)
            _calloc_internal(sizeof(ivar_list_t), 1);
        newlist->entsize = (uint32_t)sizeof(ivar_t);
    }

    // Place the new ivar at the current instance size, rounded up to
    // the requested alignment (alignment is a log2 shift count).
    uint32_t offset = unalignedInstanceSize(cls);
    uint32_t alignMask = (1<<alignment)-1;
    offset = (offset + alignMask) & ~alignMask;

    ivar_t *ivar = ivar_list_nth(newlist, newlist->count++);
    // offset is stored behind a pointer so it can be slid later.
    ivar->offset = (uintptr_t *)_malloc_internal(sizeof(*ivar->offset));
    *ivar->offset = offset;
    ivar->name = name ? _strdup_internal(name) : NULL;
    ivar->type = _strdup_internal(type);
    ivar->alignment = alignment;
    ivar->size = (uint32_t)size;

    ro_w->ivars = newlist;
    ro_w->instanceSize = (uint32_t)(offset + size);

    // Ivar layout updated in registerClass.

    rwlock_unlock_write(&runtimeLock);

    return YES;
}
5854
5855
/***********************************************************************
* class_addProtocol
* Adds a protocol to a class.
* Returns NO for a nil class or if the class already conforms.
* The protocol is wrapped in a new one-element protocol_list_t and
* appended to the class's NULL-terminated array of list pointers.
* Locking: acquires runtimeLock
**********************************************************************/
BOOL class_addProtocol(Class cls_gen, Protocol *protocol_gen)
{
    class_t *cls = newcls(cls_gen);
    protocol_t *protocol = newprotocol(protocol_gen);
    protocol_list_t *plist;
    const protocol_list_t **plistp;

    if (!cls) return NO;
    // NOTE(review): this conformance check read-locks and releases before
    // the write lock below is taken — racing adders could both append.
    if (class_conformsToProtocol(cls_gen, protocol_gen)) return NO;

    rwlock_write(&runtimeLock);

    assert(isRealized(cls));

    // fixme optimize
    plist = (protocol_list_t *)
        _malloc_internal(sizeof(protocol_list_t) + sizeof(protocol_t *));
    plist->count = 1;
    plist->list[0] = (protocol_ref_t)protocol;

    // Count the existing NULL-terminated array of list pointers.
    unsigned int count = 0;
    for (plistp = cls->data()->protocols; plistp && *plistp; plistp++) {
        count++;
    }

    // Grow by one slot plus the NULL terminator.
    cls->data()->protocols = (const protocol_list_t **)
        _realloc_internal(cls->data()->protocols,
                          (count+2) * sizeof(protocol_list_t *));
    cls->data()->protocols[count] = plist;
    cls->data()->protocols[count+1] = NULL;

    // fixme metaclass?

    rwlock_unlock_write(&runtimeLock);

    return YES;
}
5898
5899
/***********************************************************************
* _class_addProperty
* Adds (or, if replace, overwrites the attributes of) a property named
* name on cls_gen. Returns NO for nil class/name, or if the property
* exists and replace is NO.
* Locking: acquires runtimeLock
**********************************************************************/
static BOOL
_class_addProperty(Class cls_gen, const char *name,
                   const objc_property_attribute_t *attrs, unsigned int count,
                   BOOL replace)
{
    class_t *cls = newcls(cls_gen);
    chained_property_list *plist;

    if (!cls) return NO;
    if (!name) return NO;

    // NOTE(review): class_getProperty takes and drops the read lock
    // before the write lock below, so two racing adders could both
    // reach the "new property" branch — confirm callers serialize.
    property_t *prop = class_getProperty(cls_gen, name);
    if (prop && !replace) {
        // already exists, refuse to replace
        return NO;
    }
    else if (prop) {
        // Replace: swap the attribute string on the existing entry.
        rwlock_write(&runtimeLock);
        try_free(prop->attributes);
        prop->attributes = copyPropertyAttributeString(attrs, count);
        rwlock_unlock_write(&runtimeLock);
        return YES;
    }
    else {
        // Add: prepend a one-element node to the chained property list.
        rwlock_write(&runtimeLock);

        assert(isRealized(cls));

        plist = (chained_property_list *)
            _malloc_internal(sizeof(*plist) + sizeof(plist->list[0]));
        plist->count = 1;
        plist->list[0].name = _strdup_internal(name);
        plist->list[0].attributes = copyPropertyAttributeString(attrs, count);

        plist->next = cls->data()->properties;
        cls->data()->properties = plist;

        rwlock_unlock_write(&runtimeLock);

        return YES;
    }
}
5948
// Public wrapper: add only; returns NO if the property already exists.
BOOL
class_addProperty(Class cls_gen, const char *name,
                  const objc_property_attribute_t *attrs, unsigned int n)
{
    return _class_addProperty(cls_gen, name, attrs, n, NO);
}

// Public wrapper: add or overwrite; no result reported.
void
class_replaceProperty(Class cls_gen, const char *name,
                      const objc_property_attribute_t *attrs, unsigned int n)
{
    _class_addProperty(cls_gen, name, attrs, n, YES);
}
5962
5963
/***********************************************************************
* look_up_class
* Look up a class by name, and realize it.
* Returns nil if the runtime knows no class by that name.
* The two BOOL parameters are legacy-ABI options, unused here.
* Locking: acquires runtimeLock (read to find, write to realize)
**********************************************************************/
id
look_up_class(const char *name,
              BOOL includeUnconnected __attribute__((unused)),
              BOOL includeClassHandler __attribute__((unused)))
{
    if (!name) return nil;

    rwlock_read(&runtimeLock);
    class_t *result = getClass(name);
    BOOL unrealized = result && !isRealized(result);
    rwlock_unlock_read(&runtimeLock);
    if (unrealized) {
        // NOTE(review): the lock is dropped and re-acquired here, so two
        // threads may both call realizeClass for the same class —
        // presumably realizeClass tolerates that; confirm.
        rwlock_write(&runtimeLock);
        realizeClass(result);
        rwlock_unlock_write(&runtimeLock);
    }
    return (id)result;
}
5987
5988
/***********************************************************************
* objc_duplicateClass
* Creates a duplicate of original_gen named name. The duplicate shares
* original's metaclass, superclass, properties and protocols, but gets
* its own rw/ro data and deep-copied method lists (so it owns its name
* and methods). extraBytes extends the new class object's allocation.
* Locking: acquires runtimeLock
**********************************************************************/
Class
objc_duplicateClass(Class original_gen, const char *name,
                    size_t extraBytes)
{
    class_t *original = newcls(original_gen);
    class_t *duplicate;

    rwlock_write(&runtimeLock);

    assert(isRealized(original));
    assert(!isMetaClass(original));

    // The class object is itself an instance of the (shared) metaclass.
    duplicate = (class_t *)
        _calloc_class(alignedInstanceSize(original->isa) + extraBytes);
    if (unalignedInstanceSize(original->isa) < sizeof(class_t)) {
        _objc_inform("busted! %s\n", original->data()->ro->name);
    }


    duplicate->isa = original->isa;
    duplicate->superclass = original->superclass;
    duplicate->cache = (Cache)&_objc_empty_cache;
    duplicate->vtable = &_objc_empty_vtable;

    // Fresh rw data; keep original's flags except any specialized vtable.
    duplicate->setData((class_rw_t *)_calloc_internal(sizeof(*original->data()), 1));
    duplicate->data()->flags = (original->data()->flags | RW_COPIED_RO) & ~RW_SPECIALIZED_VTABLE;
    duplicate->data()->version = original->data()->version;
    duplicate->data()->firstSubclass = NULL;
    duplicate->data()->nextSiblingClass = NULL;

    // Copy the ro so the duplicate can carry a different name.
    duplicate->data()->ro = (class_ro_t *)
        _memdup_internal(original->data()->ro, sizeof(*original->data()->ro));
    *(char **)&duplicate->data()->ro->name = _strdup_internal(name);

    // Deep-copy method lists (array-of-lists form or single-list form).
    if (original->data()->flags & RW_METHOD_ARRAY) {
        duplicate->data()->method_lists = (method_list_t **)
            _memdup_internal(original->data()->method_lists,
                             malloc_size(original->data()->method_lists));
        method_list_t **mlistp;
        for (mlistp = duplicate->data()->method_lists; *mlistp; mlistp++) {
            *mlistp = (method_list_t *)
                _memdup_internal(*mlistp, method_list_size(*mlistp));
        }
    } else {
        if (original->data()->method_list) {
            duplicate->data()->method_list = (method_list_t *)
                _memdup_internal(original->data()->method_list,
                                 method_list_size(original->data()->method_list));
        }
    }

    // Properties and protocols are shared, not copied.
    // fixme dies when categories are added to the base
    duplicate->data()->properties = original->data()->properties;
    duplicate->data()->protocols = original->data()->protocols;

    if (duplicate->superclass) {
        addSubclass(duplicate->superclass, duplicate);
    }

    // Don't methodize class - construction above is correct

    addNamedClass(duplicate, duplicate->data()->ro->name);
    addRealizedClass(duplicate);
    // no: duplicate->isa == original->isa
    // addRealizedMetaclass(duplicate->isa);

    if (PrintConnecting) {
        _objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p",
                     name, original->data()->ro->name,
                     duplicate, duplicate->data()->ro);
    }

    rwlock_unlock_write(&runtimeLock);

    return (Class)duplicate;
}
6070
/***********************************************************************
* objc_initializeClassPair_internal
* Shared construction logic for objc_initializeClassPair and
* objc_allocateClassPair.
* Locking: runtimeLock must be write-locked by the caller
**********************************************************************/
6075
// &UnsetLayout is the default ivar layout during class construction.
// The sentinel is the ADDRESS, not the value: a layout pointer equal to
// &UnsetLayout means "the class builder has not set a layout yet"
// (objc_registerClassPair compares against &UnsetLayout to decide
// whether to build layouts itself).
static const uint8_t UnsetLayout = 0;
6078
// Builds a class/metaclass pair in place: allocates rw and ro data for
// both, fills in basic fields, and wires up the superclass/metaclass
// chain. cls_gen and meta_gen must be freshly allocated class storage.
// Caller must hold runtimeLock for writing and must have already
// checked for name collisions and in-construction superclasses.
static void objc_initializeClassPair_internal(Class superclass_gen, const char *name, Class cls_gen, Class meta_gen)
{
    rwlock_assert_writing(&runtimeLock);

    class_t *superclass = newcls(superclass_gen);
    class_t *cls = newcls(cls_gen);
    class_t *meta = newcls(meta_gen);
    class_ro_t *cls_ro_w, *meta_ro_w;

    // Allocate writable (rw) and writable-for-now read-only (ro) data
    // for both the class and its metaclass.
    cls->setData((class_rw_t *)_calloc_internal(sizeof(class_rw_t), 1));
    meta->setData((class_rw_t *)_calloc_internal(sizeof(class_rw_t), 1));
    cls_ro_w = (class_ro_t *)_calloc_internal(sizeof(class_ro_t), 1);
    meta_ro_w = (class_ro_t *)_calloc_internal(sizeof(class_ro_t), 1);
    cls->data()->ro = cls_ro_w;
    meta->data()->ro = meta_ro_w;

    // Set basic info
    cls->cache = (Cache)&_objc_empty_cache;
    meta->cache = (Cache)&_objc_empty_cache;
    cls->vtable = &_objc_empty_vtable;
    meta->vtable = &_objc_empty_vtable;

    // Both halves count as realized immediately, but stay marked
    // "under construction" until objc_registerClassPair clears the bit.
    cls->data()->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED;
    meta->data()->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED;
    cls->data()->version = 0;
    meta->data()->version = 7;

    cls_ro_w->flags = 0;
    meta_ro_w->flags = RO_META;
    if (!superclass) {
        cls_ro_w->flags |= RO_ROOT;
        meta_ro_w->flags |= RO_ROOT;
    }
    if (superclass) {
        // Instances start where the superclass's instances end; size is
        // identical until ivars are added.
        cls_ro_w->instanceStart = unalignedInstanceSize(superclass);
        meta_ro_w->instanceStart = unalignedInstanceSize(superclass->isa);
        cls_ro_w->instanceSize = cls_ro_w->instanceStart;
        meta_ro_w->instanceSize = meta_ro_w->instanceStart;
    } else {
        // Root class: instances hold just an isa pointer; the metaclass
        // instance is a full class_t.
        cls_ro_w->instanceStart = 0;
        meta_ro_w->instanceStart = (uint32_t)sizeof(class_t);
        cls_ro_w->instanceSize = (uint32_t)sizeof(id);  // just an isa
        meta_ro_w->instanceSize = meta_ro_w->instanceStart;
    }

    cls_ro_w->name = _strdup_internal(name);
    meta_ro_w->name = _strdup_internal(name);

    // &UnsetLayout marks the layouts as not-yet-set so that
    // objc_registerClassPair knows whether to build them.
    cls_ro_w->ivarLayout = &UnsetLayout;
    cls_ro_w->weakIvarLayout = &UnsetLayout;

    // Connect to superclasses and metaclasses
    cls->isa = meta;
    if (superclass) {
        // superclass->isa->isa is the root metaclass.
        meta->isa = superclass->isa->isa;
        cls->superclass = superclass;
        meta->superclass = superclass->isa;
        addSubclass(superclass, cls);
        addSubclass(superclass->isa, meta);
    } else {
        // Root class: the metaclass's isa points to itself, and the
        // metaclass inherits from the class itself.
        meta->isa = meta;
        cls->superclass = Nil;
        meta->superclass = cls;
        addSubclass(cls, meta);
    }
}
6145
6146 /***********************************************************************
6147 * objc_initializeClassPair
6148 **********************************************************************/
// Initializes caller-supplied class/metaclass storage for `name` under
// `superclass_gen`. Returns cls_gen on success, Nil if the name is
// taken or the superclass is still under construction.
Class objc_initializeClassPair(Class superclass_gen, const char *name, Class cls_gen, Class meta_gen)
{
    class_t *supercls = newcls(superclass_gen);
    Class result = Nil;

    rwlock_write(&runtimeLock);

    //
    // Common superclass integrity checks with objc_allocateClassPair
    //
    do {
        if (getClass(name)) break;  // name already in use
        // fixme reserve class against simultaneous allocation

        if (supercls) {
            assert(isRealized(supercls));
            if (supercls->data()->flags & RW_CONSTRUCTING) {
                // Can't make subclass of an in-construction class
                break;
            }
        }

        // just initialize what was supplied
        objc_initializeClassPair_internal(superclass_gen, name,
                                          cls_gen, meta_gen);
        result = cls_gen;
    } while (0);

    rwlock_unlock_write(&runtimeLock);
    return result;
}
6179
6180 /***********************************************************************
6181 * objc_allocateClassPair
6182 * fixme
6183 * Locking: acquires runtimeLock
6184 **********************************************************************/
// Allocates and initializes a new class/metaclass pair named `name`
// under `superclass_gen`, with `extraBytes` of indexed-ivar slop.
// Returns Nil if the name is taken or the superclass is still under
// construction. Locking: acquires runtimeLock.
Class objc_allocateClassPair(Class superclass_gen, const char *name,
                             size_t extraBytes)
{
    class_t *supercls = newcls(superclass_gen);
    Class result = Nil;

    rwlock_write(&runtimeLock);

    //
    // Common superclass integrity checks with objc_initializeClassPair
    //
    do {
        if (getClass(name)) break;  // name already in use
        // fixme reserve class against simultaneous allocation

        if (supercls) {
            assert(isRealized(supercls));
            if (supercls->data()->flags & RW_CONSTRUCTING) {
                // Can't make subclass of an in-construction class
                break;
            }
        }

        // Allocate the new class and metaclass, each sized to hold the
        // superclass-side class ivars plus the caller's extra bytes.
        size_t size = sizeof(class_t);
        size_t metasize = sizeof(class_t);
        if (supercls) {
            size = alignedInstanceSize(supercls->isa);
            metasize = alignedInstanceSize(supercls->isa->isa);
        }
        Class cls = _calloc_class(size + extraBytes);
        Class meta = _calloc_class(metasize + extraBytes);

        objc_initializeClassPair_internal(superclass_gen, name, cls, meta);
        result = cls;
    } while (0);

    rwlock_unlock_write(&runtimeLock);
    return result;
}
6228
6229
6230 /***********************************************************************
6231 * objc_registerClassPair
6232 * fixme
6233 * Locking: acquires runtimeLock
6234 **********************************************************************/
// Finishes construction of a class pair made by objc_allocateClassPair:
// builds GC ivar layouts if the class builder didn't, clears
// RW_CONSTRUCTING, and publishes the pair in the runtime's named and
// realized class tables. Logs and returns on double registration or on
// classes not built by objc_allocateClassPair.
void objc_registerClassPair(Class cls_gen)
{
    class_t *cls = newcls(cls_gen);

    rwlock_write(&runtimeLock);

    // Reject double registration (either half already constructed).
    if ((cls->data()->flags & RW_CONSTRUCTED) ||
        (cls->isa->data()->flags & RW_CONSTRUCTED))
    {
        _objc_inform("objc_registerClassPair: class '%s' was already "
                     "registered!", cls->data()->ro->name);
        rwlock_unlock_write(&runtimeLock);
        return;
    }

    // Both halves must still carry the under-construction mark set by
    // objc_initializeClassPair_internal.
    if (!(cls->data()->flags & RW_CONSTRUCTING) ||
        !(cls->isa->data()->flags & RW_CONSTRUCTING))
    {
        _objc_inform("objc_registerClassPair: class '%s' was not "
                     "allocated with objc_allocateClassPair!",
                     cls->data()->ro->name);
        rwlock_unlock_write(&runtimeLock);
        return;
    }

    // Build ivar layouts
    if (UseGC) {
        class_t *supercls = getSuperclass(cls);
        class_ro_t *ro_w = (class_ro_t *)cls->data()->ro;

        // Strong-reference layout.
        if (ro_w->ivarLayout != &UnsetLayout) {
            // Class builder already called class_setIvarLayout.
        }
        else if (!supercls) {
            // Root class. Scan conservatively (should be isa ivar only).
            ro_w->ivarLayout = NULL;
        }
        else if (ro_w->ivars == NULL) {
            // No local ivars. Use superclass's layouts.
            ro_w->ivarLayout =
                _ustrdup_internal(supercls->data()->ro->ivarLayout);
        }
        else {
            // Has local ivars. Build layouts based on superclass.
            layout_bitmap bitmap =
                layout_bitmap_create(supercls->data()->ro->ivarLayout,
                                     unalignedInstanceSize(supercls),
                                     unalignedInstanceSize(cls), NO);
            uint32_t i;
            for (i = 0; i < ro_w->ivars->count; i++) {
                ivar_t *ivar = ivar_list_nth(ro_w->ivars, i);
                if (!ivar->offset) continue;  // anonymous bitfield

                layout_bitmap_set_ivar(bitmap, ivar->type, *ivar->offset);
            }
            ro_w->ivarLayout = layout_string_create(bitmap);
            layout_bitmap_free(bitmap);
        }

        // Weak-reference layout.
        if (ro_w->weakIvarLayout != &UnsetLayout) {
            // Class builder already called class_setWeakIvarLayout.
        }
        else if (!supercls) {
            // Root class. No weak ivars (should be isa ivar only).
            ro_w->weakIvarLayout = NULL;
        }
        else if (ro_w->ivars == NULL) {
            // No local ivars. Use superclass's layout.
            ro_w->weakIvarLayout =
                _ustrdup_internal(supercls->data()->ro->weakIvarLayout);
        }
        else {
            // Has local ivars. Build layout based on superclass.
            // No way to add weak ivars yet.
            ro_w->weakIvarLayout =
                _ustrdup_internal(supercls->data()->ro->weakIvarLayout);
        }
    }

    // Clear "under construction" bit, set "done constructing" bit
    cls->data()->flags &= ~RW_CONSTRUCTING;
    cls->isa->data()->flags &= ~RW_CONSTRUCTING;
    cls->data()->flags |= RW_CONSTRUCTED;
    cls->isa->data()->flags |= RW_CONSTRUCTED;

    // Add to named and realized classes
    addNamedClass(cls, cls->data()->ro->name);
    addRealizedClass(cls);
    addRealizedMetaclass(cls->isa);
    addNonMetaClass(cls);

    rwlock_unlock_write(&runtimeLock);
}
6328
6329
6330 /***********************************************************************
6331 * detach_class
6332 * Disconnect a class from other data structures.
6333 * Exception: does not remove the class from the +load list
6334 * Call this before free_class.
6335 * Locking: runtimeLock must be held by the caller.
6336 **********************************************************************/
/***********************************************************************
* detach_class
* Disconnects a class from the runtime's data structures: frees its
* pending unattached categories, unlinks it from its superclass's
* subclass list, and removes it from the class tables.
* Exception: does not remove the class from the +load list.
* Call this before free_class.
* Locking: runtimeLock must be held for writing by the caller.
**********************************************************************/
static void detach_class(class_t *cls, BOOL isMeta)
{
    rwlock_assert_writing(&runtimeLock);

    // Discard categories not yet attached to this class.
    category_list *pendingCats = unattachedCategoriesForClass(cls);
    if (pendingCats) free(pendingCats);

    // Unlink from the superclass's subclass list, if realized.
    if (isRealized(cls)) {
        class_t *supercls = getSuperclass(cls);
        if (supercls) {
            removeSubclass(supercls, cls);
        }
    }

    // Remove from the class tables and +load queue.
    if (isMeta) {
        removeRealizedMetaclass(cls);
    } else {
        removeNamedClass(cls, getName(cls));
        removeRealizedClass(cls);
        removeNonMetaClass(cls);
    }
}
6363
6364
6365 /***********************************************************************
6366 * free_class
6367 * Frees a class's data structures.
6368 * Call this after detach_class.
6369 * Locking: runtimeLock must be held by the caller
6370 **********************************************************************/
// Frees a realized class's heap data structures: method cache, method
// lists (and the type strings they own), ivar list, protocol lists,
// chained property lists, specialized vtable, ro data, rw data, and the
// class storage itself. Call detach_class first so no runtime table
// still references cls. Caller must hold runtimeLock for writing.
// NOTE(review): try_free presumably skips pointers that don't belong to
// malloc (e.g. data still in a shared cache) — confirm at its definition.
static void free_class(class_t *cls)
{
    rwlock_assert_writing(&runtimeLock);

    // A never-realized class owns none of the data below.
    if (! isRealized(cls)) return;

    uint32_t i;

    // Dereferences the cache contents; do this before freeing methods
    if (cls->cache != (Cache)&_objc_empty_cache) _cache_free(cls->cache);

    // Free every method list and each method's copied type string.
    FOREACH_METHOD_LIST(mlist, cls, {
        for (i = 0; i < mlist->count; i++) {
            method_t *m = method_list_nth(mlist, i);
            try_free(m->types);
        }
        try_free(mlist);
    });
    if (cls->data()->flags & RW_METHOD_ARRAY) {
        // method_lists is itself an allocated array of list pointers.
        try_free(cls->data()->method_lists);
    }

    // Free the ivar list and each ivar's offset/name/type storage.
    const ivar_list_t *ilist = cls->data()->ro->ivars;
    if (ilist) {
        for (i = 0; i < ilist->count; i++) {
            const ivar_t *ivar = ivar_list_nth(ilist, i);
            try_free(ivar->offset);
            try_free(ivar->name);
            try_free(ivar->type);
        }
        try_free(ilist);
    }

    // Free each protocol list and the terminated array of list pointers.
    const protocol_list_t **plistp;
    for (plistp = cls->data()->protocols; plistp && *plistp; plistp++) {
        try_free(*plistp);
    }
    try_free(cls->data()->protocols);

    // Free the chained property lists and their name/attribute strings.
    const chained_property_list *proplist = cls->data()->properties;
    while (proplist) {
        for (i = 0; i < proplist->count; i++) {
            const property_t *prop = proplist->list+i;
            try_free(prop->name);
            try_free(prop->attributes);
        }
        {
            // Advance before freeing the node we're standing on.
            const chained_property_list *temp = proplist;
            proplist = proplist->next;
            try_free(temp);
        }
    }

    // Free the vtable only if it is class-specific, then the remaining
    // per-class storage, innermost first.
    if (cls->vtable != &_objc_empty_vtable &&
        cls->data()->flags & RW_SPECIALIZED_VTABLE) try_free(cls->vtable);
    try_free(cls->data()->ro->ivarLayout);
    try_free(cls->data()->ro->weakIvarLayout);
    try_free(cls->data()->ro->name);
    try_free(cls->data()->ro);
    try_free(cls->data());
    try_free(cls);
}
6433
6434
// Destroys a class pair created by objc_allocateClassPair, whether or
// not it was registered: detaches class and metaclass from the runtime
// tables, then frees their storage. Logs and returns without freeing
// when given a class not built by objc_allocateClassPair, or a
// metaclass. Locking: acquires runtimeLock.
void objc_disposeClassPair(Class cls_gen)
{
    class_t *cls = newcls(cls_gen);

    rwlock_write(&runtimeLock);

    // Either constructing or constructed is acceptable; anything else
    // did not come from objc_allocateClassPair.
    if (!(cls->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)) ||
        !(cls->isa->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)))
    {
        // class not allocated with objc_allocateClassPair
        // disposing still-unregistered class is OK!
        _objc_inform("objc_disposeClassPair: class '%s' was not "
                     "allocated with objc_allocateClassPair!",
                     cls->data()->ro->name);
        rwlock_unlock_write(&runtimeLock);
        return;
    }

    // Callers must pass the class; the metaclass is disposed with it.
    if (isMetaClass(cls)) {
        _objc_inform("objc_disposeClassPair: class '%s' is a metaclass, "
                     "not a class!", cls->data()->ro->name);
        rwlock_unlock_write(&runtimeLock);
        return;
    }

    // Shouldn't have any live subclasses. Warn but continue.
    if (cls->data()->firstSubclass) {
        _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
                     "including '%s'!", cls->data()->ro->name,
                     getName(cls->data()->firstSubclass));
    }
    if (cls->isa->data()->firstSubclass) {
        _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
                     "including '%s'!", cls->data()->ro->name,
                     getName(cls->isa->data()->firstSubclass));
    }

    // don't remove_class_from_loadable_list()
    // - it's not there and we don't have the lock
    // Detach both halves before freeing either.
    detach_class(cls->isa, YES);
    detach_class(cls, NO);
    free_class(cls->isa);
    free_class(cls);

    rwlock_unlock_write(&runtimeLock);
}
6481
6482
6483 /***********************************************************************
6484 * class_createInstance
6485 * fixme
6486 * Locking: none
6487 **********************************************************************/
// Allocates and initializes one instance of cls with extraBytes of
// trailing indexed-ivar storage, drawn from `zone` (NULL = default
// zone; GC zone when collecting). Returns nil if cls is nil or
// allocation fails.
static id
_class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
    __attribute__((always_inline));

static id
_class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
{
    if (!cls) return nil;

    assert(isRealized(newcls(cls)));

    size_t size = alignedInstanceSize(newcls(cls)) + extraBytes;

    // CF requires all objects be at least 16 bytes.
    if (size < 16) size = 16;

    id obj;
#if SUPPORT_GC
    if (UseGC) {
        // GC build: allocate a scanned object from the collector's zone.
        obj = (id)auto_zone_allocate_object(gc_zone, size,
                                            AUTO_OBJECT_SCANNED, 0, 1);
    } else
#endif
    if (zone) {
        obj = (id)malloc_zone_calloc ((malloc_zone_t *)zone, 1, size);
    } else {
        obj = (id)calloc(1, size);
    }
    if (!obj) return nil;

    obj->isa = cls;                 // need not be object_setClass

    // Run C++ ivar constructors if the class has any.
    // NOTE(review): _objc_constructOrFree presumably frees obj and
    // returns nil on constructor failure — confirm at its definition.
    if (_class_hasCxxStructors(cls)) {
        obj = _objc_constructOrFree(cls, obj);
    }

    return obj;
}
6526
6527
id
class_createInstance(Class cls, size_t extraBytes)
{
    // Allocate from the default zone (NULL zone argument).
    id newInstance = _class_createInstanceFromZone(cls, extraBytes, NULL);
    return newInstance;
}
6533
6534 /***********************************************************************
6535 * class_createInstances
6536 * fixme
6537 * Locking: none
6538 **********************************************************************/
unsigned
class_createInstances(Class cls, size_t extraBytes,
                      id *results, unsigned num_requested)
{
    // Batch allocation from the default zone (NULL zone argument).
    unsigned created =
        _class_createInstancesFromZone(cls, extraBytes, NULL,
                                       results, num_requested);
    return created;
}
6546
// Returns true if cls or any of its superclasses was compiled with
// automatic retain/release.
static BOOL classOrSuperClassesUseARR(Class cls) {
    for (Class c = cls; c; c = class_getSuperclass(c)) {
        if (_class_usesAutomaticRetainRelease(c)) return true;
    }
    return false;
}
6554
// Fixes ownership of references inside a bitwise-copied object: for
// every ARR-compiled class in the hierarchy, uses its strong ivar
// layout to retain copied strong references, and its weak ivar layout
// to re-register weak references (the copied bits of weak slots are
// discarded, not trusted). Each layout byte encodes, in pointer-sized
// words, a skip count (high nibble) then a scan count (low nibble);
// a zero byte terminates the layout.
static void arr_fixup_copied_references(id newObject, id oldObject)
{
    // use ARR layouts to correctly copy the references from old object to new, both strong and weak.
    Class cls = oldObject->isa;
    while (cls) {
        if (_class_usesAutomaticRetainRelease(cls)) {
            // FIXME: align the instance start to nearest id boundary. This currently handles the case where
            // the compiler folds a leading BOOL (char, short, etc.) into the alignment slop of a superclass.
            size_t instanceStart = _class_getInstanceStart(cls);
            const uint8_t *strongLayout = class_getIvarLayout(cls);
            if (strongLayout) {
                id *newPtr = (id *)((char*)newObject + instanceStart);
                unsigned char byte;
                while ((byte = *strongLayout++)) {
                    unsigned skips = (byte >> 4);
                    unsigned scans = (byte & 0x0F);
                    newPtr += skips;
                    while (scans--) {
                        // ensure strong references are properly retained.
                        id value = *newPtr++;
                        if (value) objc_retain(value);
                    }
                }
            }
            const uint8_t *weakLayout = class_getWeakIvarLayout(cls);
            // fix up weak references if any.
            if (weakLayout) {
                id *newPtr = (id *)((char*)newObject + instanceStart), *oldPtr = (id *)((char*)oldObject + instanceStart);
                unsigned char byte;
                while ((byte = *weakLayout++)) {
                    unsigned skips = (byte >> 4);
                    unsigned weaks = (byte & 0x0F);
                    newPtr += skips, oldPtr += skips;
                    while (weaks--) {
                        // Clear the raw copied bits, then store a freshly
                        // registered weak reference to the old target.
                        *newPtr = nil;
                        objc_storeWeak(newPtr, objc_loadWeak(oldPtr));
                        ++newPtr, ++oldPtr;
                    }
                }
            }
        }
        cls = class_getSuperclass(cls);
    }
}
6599
6600 /***********************************************************************
6601 * object_copyFromZone
6602 * fixme
6603 * Locking: none
6604 **********************************************************************/
// Makes a shallow copy of oldObj (plus extraBytes of trailing storage)
// from `zone` (NULL = default zone; GC zone when collecting), then
// fixes up strong/weak references for GC or ARR classes.
// Returns nil on nil input or allocation failure; tagged pointers are
// returned unchanged since they have no heap storage.
static id
_object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
{
    id obj;
    size_t size;

    if (!oldObj) return nil;
    if (OBJC_IS_TAGGED_PTR(oldObj)) return oldObj;

    size = _class_getInstanceSize(oldObj->isa) + extraBytes;
#if SUPPORT_GC
    if (UseGC) {
        obj = (id) auto_zone_allocate_object(gc_zone, size,
                                             AUTO_OBJECT_SCANNED, 0, 1);
    } else
#endif
    if (zone) {
        obj = (id) malloc_zone_calloc((malloc_zone_t *)zone, size, 1);
    } else {
        obj = (id) calloc(1, size);
    }
    if (!obj) return nil;

    // fixme this doesn't handle C++ ivars correctly (#4619414)
    objc_memmove_collectable(obj, oldObj, size);

    // Raw copy done; now repair reference ownership in the new object.
#if SUPPORT_GC
    if (UseGC)
        gc_fixup_weakreferences(obj, oldObj);
    else if (classOrSuperClassesUseARR(obj->isa))
        arr_fixup_copied_references(obj, oldObj);
#else
    if (classOrSuperClassesUseARR(obj->isa))
        arr_fixup_copied_references(obj, oldObj);
#endif

    return obj;
}
6643
6644
6645 /***********************************************************************
6646 * object_copy
6647 * fixme
6648 * Locking: none
6649 **********************************************************************/
id
object_copy(id oldObj, size_t extraBytes)
{
    // Copy into the default malloc zone.
    void *zone = malloc_default_zone();
    return _object_copyFromZone(oldObj, extraBytes, zone);
}
6655
6656
6657 #if !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE)
6658
6659 /***********************************************************************
6660 * class_createInstanceFromZone
6661 * fixme
6662 * Locking: none
6663 **********************************************************************/
id
class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
{
    // Thin public wrapper over the always-inlined implementation.
    id obj = _class_createInstanceFromZone(cls, extraBytes, zone);
    return obj;
}
6669
6670 /***********************************************************************
6671 * object_copyFromZone
6672 * fixme
6673 * Locking: none
6674 **********************************************************************/
id
object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
{
    // Thin public wrapper over the internal implementation.
    id copied = _object_copyFromZone(oldObj, extraBytes, zone);
    return copied;
}
6680
6681 #endif
6682
6683
6684 /***********************************************************************
6685 * objc_destructInstance
6686 * Destroys an instance without freeing memory.
6687 * Calls C++ destructors.
6688 * Calls ARR ivar cleanup.
6689 * Removes associative references.
6690 * Returns `obj`. Does nothing if `obj` is nil.
6691 * Be warned that GC DOES NOT CALL THIS. If you edit this, also edit finalize.
6692 * CoreFoundation and other clients do call this under GC.
6693 **********************************************************************/
// Destroys an instance without freeing its memory: runs C++
// destructors, removes associative references, and (non-GC) performs
// deallocating-object cleanup via objc_clear_deallocating. Returns
// `obj`; nil input is a no-op. GC does NOT call this — if you edit it,
// also edit finalize. CoreFoundation and other clients do call this
// under GC.
void *objc_destructInstance(id obj)
{
    if (!obj) return obj;

    Class cls_gen = _object_getClass(obj);
    class_t *cls = newcls(cls_gen);

    // Read all of the flags at once for performance.
    bool hasCxxDtors = hasCxxStructors(cls);
    bool hasAssocRefs = !UseGC && _class_instancesHaveAssociatedObjects(cls_gen);

    // This order is important.
    if (hasCxxDtors) object_cxxDestruct(obj);
    if (hasAssocRefs) _object_remove_assocations(obj);
    if (!UseGC) objc_clear_deallocating(obj);

    return obj;
}
6713
6714
6715 /***********************************************************************
6716 * object_dispose
6717 * fixme
6718 * Locking: none
6719 **********************************************************************/
// Tears down and frees an instance: runs destructors and associated-
// object cleanup via objc_destructInstance, then releases the memory.
// Always returns nil. nil input is a no-op.
id
object_dispose(id obj)
{
    if (!obj) return nil;

    objc_destructInstance(obj);

#if SUPPORT_GC
    if (UseGC) {
        auto_zone_retain(gc_zone, obj);  // gc free expects rc==1
    }
#endif

    free(obj);

    return nil;
}
6737
6738
6739 /***********************************************************************
6740 * _objc_getFreedObjectClass
6741 * fixme
6742 * Locking: none
6743 **********************************************************************/
Class _objc_getFreedObjectClass (void)
{
    // The new-ABI runtime provides no "freed object" debug class;
    // always report nil.
    return nil;
}
6748
6749 #if SUPPORT_FIXUP
6750
6751 OBJC_EXTERN id objc_msgSend_fixedup(id, SEL, ...);
6752 OBJC_EXTERN id objc_msgSendSuper2_fixedup(id, SEL, ...);
6753 OBJC_EXTERN id objc_msgSend_stret_fixedup(id, SEL, ...);
6754 OBJC_EXTERN id objc_msgSendSuper2_stret_fixedup(id, SEL, ...);
6755 #if defined(__i386__) || defined(__x86_64__)
6756 OBJC_EXTERN id objc_msgSend_fpret_fixedup(id, SEL, ...);
6757 #endif
6758 #if defined(__x86_64__)
6759 OBJC_EXTERN id objc_msgSend_fp2ret_fixedup(id, SEL, ...);
6760 #endif
6761
6762 /***********************************************************************
6763 * _objc_fixupMessageRef
6764 * Fixes up message ref *msg.
6765 * obj is the receiver. supr is NULL for non-super messages
6766 * Locking: acquires runtimeLock
6767 **********************************************************************/
// Fixes up the compiler-emitted message ref *msg: interns the selector,
// rewrites msg->imp from the *_fixup dispatcher to its *_fixedup (or
// vtable/ignored) counterpart, and returns the IMP to call now.
// obj is the receiver. supr is NULL for non-super messages.
// Locking: acquires runtimeLock (must enter unlocked).
OBJC_EXTERN IMP
_objc_fixupMessageRef(id obj, struct objc_super2 *supr, message_ref_t *msg)
{
    IMP imp;
    class_t *isa;
#if SUPPORT_VTABLE
    int vtableIndex;
#endif

    rwlock_assert_unlocked(&runtimeLock);

    if (!supr) {
        // normal message - search obj->isa for the method implementation
        isa = (class_t *) _object_getClass(obj);

        if (!isRealized(isa)) {
            // obj is a class object, isa is its metaclass
            class_t *cls;
            rwlock_write(&runtimeLock);
            cls = realizeClass((class_t *)obj);
            rwlock_unlock_write(&runtimeLock);

            // shouldn't have instances of unrealized classes!
            assert(isMetaClass(isa));
            // shouldn't be relocating classes here!
            assert(cls == (class_t *)obj);
        }
    }
    else {
        // this is objc_msgSend_super, and supr->current_class->superclass
        // is the class to search for the method implementation
        assert(isRealized((class_t *)supr->current_class));
        isa = getSuperclass((class_t *)supr->current_class);
    }

    // The compiler stored the selector as a C string; intern it.
    msg->sel = sel_registerName((const char *)msg->sel);

    if (ignoreSelector(msg->sel)) {
        // ignored selector - bypass dispatcher
        msg->imp = (IMP)&vtable_ignored;
        imp = (IMP)&_objc_ignored_method;
    }
#if SUPPORT_VTABLE
    else if (msg->imp == (IMP)&objc_msgSend_fixup &&
             (vtableIndex = vtable_getIndex(msg->sel)) >= 0)
    {
        // vtable dispatch
        msg->imp = vtableTrampolines[vtableIndex];
        imp = isa->vtable[vtableIndex];
    }
#endif
    else {
        // ordinary dispatch
        imp = lookUpMethod((Class)isa, msg->sel, YES/*initialize*/, YES/*cache*/, obj);

        // Swap each fixup dispatcher for its fixed-up counterpart.
        if (msg->imp == (IMP)&objc_msgSend_fixup) {
            msg->imp = (IMP)&objc_msgSend_fixedup;
        }
        else if (msg->imp == (IMP)&objc_msgSendSuper2_fixup) {
            msg->imp = (IMP)&objc_msgSendSuper2_fixedup;
        }
        else if (msg->imp == (IMP)&objc_msgSend_stret_fixup) {
            msg->imp = (IMP)&objc_msgSend_stret_fixedup;
        }
        else if (msg->imp == (IMP)&objc_msgSendSuper2_stret_fixup) {
            msg->imp = (IMP)&objc_msgSendSuper2_stret_fixedup;
        }
#if defined(__i386__) || defined(__x86_64__)
        else if (msg->imp == (IMP)&objc_msgSend_fpret_fixup) {
            msg->imp = (IMP)&objc_msgSend_fpret_fixedup;
        }
#endif
#if defined(__x86_64__)
        else if (msg->imp == (IMP)&objc_msgSend_fp2ret_fixup) {
            msg->imp = (IMP)&objc_msgSend_fp2ret_fixedup;
        }
#endif
        else {
            // The ref may already have been fixed up, either by another thread
            // or by +initialize via lookUpMethod above.
        }
    }

    return imp;
}
6853
6854 // SUPPORT_FIXUP
6855 #endif
6856
6857
6858 // ProKit SPI
// Reparents a realized class (and its metaclass) under newSuper,
// updating both subclass lists and invalidating caches/vtables.
// Returns the previous superclass. Caller holds runtimeLock for
// writing.
static class_t *setSuperclass(class_t *cls, class_t *newSuper)
{
    rwlock_assert_writing(&runtimeLock);

    assert(isRealized(cls));
    assert(isRealized(newSuper));

    // Detach cls and its metaclass from the old superclass pair.
    class_t *previousSuper = cls->superclass;
    removeSubclass(previousSuper, cls);
    removeSubclass(previousSuper->isa, cls->isa);

    // Attach to the new superclass pair.
    cls->superclass = newSuper;
    cls->isa->superclass = newSuper->isa;
    addSubclass(newSuper, cls);
    addSubclass(newSuper->isa, cls->isa);

    // Method lookups now resolve differently; flush cached dispatch
    // state for the metaclass and the class.
    flushCaches(cls->isa);
    flushVtables(cls->isa);
    flushCaches(cls);
    flushVtables(cls);

    return previousSuper;
}
6884
6885
// Public wrapper: reparents cls_gen under newSuper_gen and returns the
// previous superclass. Locking: acquires runtimeLock.
Class class_setSuperclass(Class cls_gen, Class newSuper_gen)
{
    class_t *previousSuper;

    rwlock_write(&runtimeLock);
    previousSuper = setSuperclass(newcls(cls_gen), newcls(newSuper_gen));
    rwlock_unlock_write(&runtimeLock);

    return (Class)previousSuper;
}
6898
6899 #endif