1 /*
2 * Copyright (c) 2005-2009 Apple Inc. All Rights Reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24 /***********************************************************************
25 * objc-runtime-new.mm
26 * Support for new-ABI classes and images.
27 **********************************************************************/
28
29 #if __OBJC2__
30
31 #include "DenseMapExtras.h"
32 #include "objc-private.h"
33 #include "objc-runtime-new.h"
34 #include "objc-file.h"
35 #include "objc-cache.h"
36 #include <Block.h>
37 #include <objc/message.h>
38 #include <mach/shared_region.h>
39
40 #define newprotocol(p) ((protocol_t *)p)
41
42 static void disableTaggedPointers();
43 static void detach_class(Class cls, bool isMeta);
44 static void free_class(Class cls);
45 static IMP addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace);
46 static void adjustCustomFlagsForMethodChange(Class cls, method_t *meth);
47 static method_t *search_method_list(const method_list_t *mlist, SEL sel);
48 static bool method_lists_contains_any(method_list_t **mlists, method_list_t **end,
49 SEL sels[], size_t selcount);
50 static void flushCaches(Class cls);
51 static void initializeTaggedPointerObfuscator(void);
52 #if SUPPORT_FIXUP
53 static void fixupMessageRef(message_ref_t *msg);
54 #endif
55 static Class realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock);
56 static Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized);
57
58 struct locstamped_category_t {
59 category_t *cat;
60 struct header_info *hi;
61 };
62 enum {
63 ATTACH_CLASS = 1 << 0,
64 ATTACH_METACLASS = 1 << 1,
65 ATTACH_CLASS_AND_METACLASS = 1 << 2,
66 ATTACH_EXISTING = 1 << 3,
67 };
68 static void attachCategories(Class cls, const struct locstamped_category_t *cats_list, uint32_t cats_count, int flags);
69
70
71 /***********************************************************************
72 * Lock management
73 **********************************************************************/
74 mutex_t runtimeLock;
75 mutex_t selLock;
76 #if CONFIG_USE_CACHE_LOCK
77 mutex_t cacheUpdateLock;
78 #endif
79 recursive_mutex_t loadMethodLock;
80
81 /***********************************************************************
82 * Class structure decoding
83 **********************************************************************/
84
85 const uintptr_t objc_debug_class_rw_data_mask = FAST_DATA_MASK;
86
87
88 /***********************************************************************
89 * Non-pointer isa decoding
90 **********************************************************************/
91 #if SUPPORT_INDEXED_ISA
92
93 // Indexed non-pointer isa.
94
95 // These are used to mask the ISA and see if it has an index or not.
96 const uintptr_t objc_debug_indexed_isa_magic_mask = ISA_INDEX_MAGIC_MASK;
97 const uintptr_t objc_debug_indexed_isa_magic_value = ISA_INDEX_MAGIC_VALUE;
98
99 // die if masks overlap
100 STATIC_ASSERT((ISA_INDEX_MASK & ISA_INDEX_MAGIC_MASK) == 0);
101
102 // die if magic is wrong
103 STATIC_ASSERT((~ISA_INDEX_MAGIC_MASK & ISA_INDEX_MAGIC_VALUE) == 0);
104
105 // Then these are used to extract the index from the ISA.
106 const uintptr_t objc_debug_indexed_isa_index_mask = ISA_INDEX_MASK;
107 const uintptr_t objc_debug_indexed_isa_index_shift = ISA_INDEX_SHIFT;
108
109 asm("\n .globl _objc_absolute_indexed_isa_magic_mask" \
110 "\n _objc_absolute_indexed_isa_magic_mask = " STRINGIFY2(ISA_INDEX_MAGIC_MASK));
111 asm("\n .globl _objc_absolute_indexed_isa_magic_value" \
112 "\n _objc_absolute_indexed_isa_magic_value = " STRINGIFY2(ISA_INDEX_MAGIC_VALUE));
113 asm("\n .globl _objc_absolute_indexed_isa_index_mask" \
114 "\n _objc_absolute_indexed_isa_index_mask = " STRINGIFY2(ISA_INDEX_MASK));
115 asm("\n .globl _objc_absolute_indexed_isa_index_shift" \
116 "\n _objc_absolute_indexed_isa_index_shift = " STRINGIFY2(ISA_INDEX_SHIFT));
117
118
119 // And then we can use that index to get the class from this array. Note
120 // the size is provided so that clients can ensure the index they get is in
121 // bounds and not read off the end of the array.
122 // Defined in the objc-msg-*.s files
123 // const Class objc_indexed_classes[]
124
125 // When we don't have enough bits to store a class*, we can instead store an
126 // index into this array. Classes are added here when they are realized.
127 // Note, an index of 0 is illegal.
128 uintptr_t objc_indexed_classes_count = 0;
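// Illustrative sketch (not part of the runtime): given the exported
// objc_debug_indexed_isa_* values above, a debugger or introspection tool
// could recover the class from a raw indexed isa roughly like this:
//
//   uintptr_t rawIsa = /* value loaded from an object's isa field */;
//   if ((rawIsa & objc_debug_indexed_isa_magic_mask) ==
//       objc_debug_indexed_isa_magic_value)
//   {
//       uintptr_t index = (rawIsa & objc_debug_indexed_isa_index_mask)
//                         >> objc_debug_indexed_isa_index_shift;
//       if (index != 0  &&  index < objc_indexed_classes_count) {
//           Class cls = objc_indexed_classes[index];
//       }
//   }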
129
130 // SUPPORT_INDEXED_ISA
131 #else
132 // not SUPPORT_INDEXED_ISA
133
134 // These variables exist but are all set to 0 so that they are ignored.
135 const uintptr_t objc_debug_indexed_isa_magic_mask = 0;
136 const uintptr_t objc_debug_indexed_isa_magic_value = 0;
137 const uintptr_t objc_debug_indexed_isa_index_mask = 0;
138 const uintptr_t objc_debug_indexed_isa_index_shift = 0;
139 Class objc_indexed_classes[1] = { nil };
140 uintptr_t objc_indexed_classes_count = 0;
141
142 // not SUPPORT_INDEXED_ISA
143 #endif
144
145
146 #if SUPPORT_PACKED_ISA
147
148 // Packed non-pointer isa.
149
150 asm("\n .globl _objc_absolute_packed_isa_class_mask" \
151 "\n _objc_absolute_packed_isa_class_mask = " STRINGIFY2(ISA_MASK));
152
153 const uintptr_t objc_debug_isa_class_mask = ISA_MASK;
154 const uintptr_t objc_debug_isa_magic_mask = ISA_MAGIC_MASK;
155 const uintptr_t objc_debug_isa_magic_value = ISA_MAGIC_VALUE;
156
157 // die if masks overlap
158 STATIC_ASSERT((ISA_MASK & ISA_MAGIC_MASK) == 0);
159
160 // die if magic is wrong
161 STATIC_ASSERT((~ISA_MAGIC_MASK & ISA_MAGIC_VALUE) == 0);
162
163 // die if virtual address space bound goes up
164 STATIC_ASSERT((~ISA_MASK & MACH_VM_MAX_ADDRESS) == 0 ||
165 ISA_MASK + sizeof(void*) == MACH_VM_MAX_ADDRESS);
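// Illustrative sketch (not part of the runtime): with a packed isa, the
// class pointer is recovered by masking rather than by table lookup:
//
//   uintptr_t rawIsa = /* value loaded from an object's isa field */;
//   if ((rawIsa & objc_debug_isa_magic_mask) == objc_debug_isa_magic_value) {
//       Class cls = (Class)(rawIsa & objc_debug_isa_class_mask);
//   }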
166
167 // SUPPORT_PACKED_ISA
168 #else
169 // not SUPPORT_PACKED_ISA
170
171 // These variables exist but enforce pointer alignment only.
172 const uintptr_t objc_debug_isa_class_mask = (~WORD_MASK);
173 const uintptr_t objc_debug_isa_magic_mask = WORD_MASK;
174 const uintptr_t objc_debug_isa_magic_value = 0;
175
176 // not SUPPORT_PACKED_ISA
177 #endif
178
179
180 /***********************************************************************
181 * Swift marker bits
182 **********************************************************************/
183 const uintptr_t objc_debug_swift_stable_abi_bit = FAST_IS_SWIFT_STABLE;
184
185
186 /***********************************************************************
187 * allocatedClasses
188 * A table of all classes (and metaclasses) which have been allocated
189 * with objc_allocateClassPair.
190 **********************************************************************/
191 namespace objc {
192 static ExplicitInitDenseSet<Class> allocatedClasses;
193 }
194
195 /***********************************************************************
196 * _firstRealizedClass
197 * The root of all realized classes
198 **********************************************************************/
199 static Class _firstRealizedClass = nil;
200
201 /*
202 Low two bits of mlist->entsize are used as the fixed-up marker.
203 PREOPTIMIZED VERSION:
204 Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted).
205 (Protocol method lists are not sorted because of their extra parallel data)
206 Runtime fixed-up method lists get 3.
207 UN-PREOPTIMIZED VERSION:
208 Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted)
209 Shared cache's sorting and uniquing are not trusted, but do affect the
210 location of the selector name string.
211 Runtime fixed-up method lists get 2.
212
213 High two bits of protocol->flags are used as the fixed-up marker.
214 PREOPTIMIZED VERSION:
215 Protocols from shared cache are 1<<30.
216 Runtime fixed-up protocols get 1<<30.
217 UN-PREOPTIMIZED VERSION:
218 Protocols from shared cache are 1<<30.
219 Shared cache's fixups are not trusted.
220 Runtime fixed-up protocols get 3<<30.
221 */
222
223 static uint32_t fixed_up_method_list = 3;
224 static uint32_t uniqued_method_list = 1;
225 static uint32_t fixed_up_protocol = PROTOCOL_FIXED_UP_1;
226 static uint32_t canonical_protocol = PROTOCOL_IS_CANONICAL;
227
228 void
229 disableSharedCacheOptimizations(void)
230 {
231 fixed_up_method_list = 2;
232 // It is safe to set uniqued_method_list to 0, as we only consult it when
233 // the method list is already in need of being fixed up
234 uniqued_method_list = 0;
235 fixed_up_protocol = PROTOCOL_FIXED_UP_1 | PROTOCOL_FIXED_UP_2;
236 // It's safe to just set canonical_protocol to 0, as we'll never call
237 // clearIsCanonical() unless isCanonical() returned true, which can't happen
238 // with a 0 mask
239 canonical_protocol = 0;
240 }
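// Worked example of the scheme described above: with a preoptimized shared
// cache, a uniqued-and-sorted method list arrives with flags() == 3, which
// already equals fixed_up_method_list, so isFixedUp() is true and
// fixupMethodList() is skipped. After disableSharedCacheOptimizations(),
// fixed_up_method_list becomes 2, so that same list no longer reads as
// fixed up and will be re-uniqued and re-sorted; runtime-fixed lists then
// get flags 2 instead of 3.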
241
242 bool method_list_t::isUniqued() const {
243 return (flags() & uniqued_method_list) != 0;
244 }
245
246 bool method_list_t::isFixedUp() const {
247 return flags() == fixed_up_method_list;
248 }
249
250 void method_list_t::setFixedUp() {
251 runtimeLock.assertLocked();
252 ASSERT(!isFixedUp());
253 entsizeAndFlags = entsize() | fixed_up_method_list;
254 }
255
256 bool protocol_t::isFixedUp() const {
257 return (flags & PROTOCOL_FIXED_UP_MASK) == fixed_up_protocol;
258 }
259
260 void protocol_t::setFixedUp() {
261 runtimeLock.assertLocked();
262 ASSERT(!isFixedUp());
263 flags = (flags & ~PROTOCOL_FIXED_UP_MASK) | fixed_up_protocol;
264 }
265
266 bool protocol_t::isCanonical() const {
267 return (flags & canonical_protocol) != 0;
268 }
269
270 void protocol_t::clearIsCanonical() {
271 runtimeLock.assertLocked();
272 ASSERT(isCanonical());
273 flags = flags & ~canonical_protocol;
274 }
275
276
277 method_list_t **method_array_t::endCategoryMethodLists(Class cls)
278 {
279 method_list_t **mlists = beginLists();
280 method_list_t **mlistsEnd = endLists();
281
282 if (mlists == mlistsEnd || !cls->data()->ro->baseMethods())
283 {
284 // No methods, or no base methods.
285 // Everything here is a category method.
286 return mlistsEnd;
287 }
288
289 // Have base methods. Category methods are
290 // everything except the last method list.
291 return mlistsEnd - 1;
292 }
293
294 static const char *sel_cname(SEL sel)
295 {
296 return (const char *)(void *)sel;
297 }
298
299
300 static size_t protocol_list_size(const protocol_list_t *plist)
301 {
302 return sizeof(protocol_list_t) + plist->count * sizeof(protocol_t *);
303 }
304
305
306 static void try_free(const void *p)
307 {
308 if (p && malloc_size(p)) free((void *)p);
309 }
310
311
312 using ClassCopyFixupHandler = void (*)(Class _Nonnull oldClass,
313 Class _Nonnull newClass);
314 // Normally there's only one handler registered.
315 static GlobalSmallVector<ClassCopyFixupHandler, 1> classCopyFixupHandlers;
316
317 void _objc_setClassCopyFixupHandler(void (* _Nonnull newFixupHandler)
318 (Class _Nonnull oldClass, Class _Nonnull newClass)) {
319 mutex_locker_t lock(runtimeLock);
320
321 classCopyFixupHandlers.append(newFixupHandler);
322 }
323
324 static Class
325 alloc_class_for_subclass(Class supercls, size_t extraBytes)
326 {
327 if (!supercls || !supercls->isAnySwift()) {
328 return _calloc_class(sizeof(objc_class) + extraBytes);
329 }
330
331 // Superclass is a Swift class. New subclass must duplicate its extra bits.
332
333 // Allocate the new class, with space for super's prefix and suffix
334 // and self's extraBytes.
335 swift_class_t *swiftSupercls = (swift_class_t *)supercls;
336 size_t superSize = swiftSupercls->classSize;
337 void *superBits = swiftSupercls->baseAddress();
338 void *bits = malloc(superSize + extraBytes);
339
340 // Copy all of the superclass's data to the new class.
341 memcpy(bits, superBits, superSize);
342
343 // Erase the objc data and the Swift description in the new class.
344 swift_class_t *swcls = (swift_class_t *)
345 ((uint8_t *)bits + swiftSupercls->classAddressOffset);
346 bzero(swcls, sizeof(objc_class));
347 swcls->description = nil;
348
349 for (auto handler : classCopyFixupHandlers) {
350 handler(supercls, (Class)swcls);
351 }
352
353 // Mark this class as Swift-enhanced.
354 if (supercls->isSwiftStable()) {
355 swcls->bits.setIsSwiftStable();
356 }
357 if (supercls->isSwiftLegacy()) {
358 swcls->bits.setIsSwiftLegacy();
359 }
360
361 return (Class)swcls;
362 }
363
364
365 /***********************************************************************
366 * object_getIndexedIvars.
367 **********************************************************************/
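// Usage sketch (hypothetical names, not part of the runtime): extra bytes
// requested when creating an instance live immediately after the declared
// ivars, and object_getIndexedIvars() returns a pointer to them:
//
//   id obj = class_createInstance(someClass, 2 * sizeof(int));
//   int *extra = (int *)object_getIndexedIvars(obj);
//   extra[0] = 42;   // scribble in the indexed-ivar area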
368 void *object_getIndexedIvars(id obj)
369 {
370 uint8_t *base = (uint8_t *)obj;
371
372 if (!obj) return nil;
373 if (obj->isTaggedPointer()) return nil;
374
375 if (!obj->isClass()) return base + obj->ISA()->alignedInstanceSize();
376
377 Class cls = (Class)obj;
378 if (!cls->isAnySwift()) return base + sizeof(objc_class);
379
380 swift_class_t *swcls = (swift_class_t *)cls;
381 return base - swcls->classAddressOffset + word_align(swcls->classSize);
382 }
383
384
385 /***********************************************************************
386 * make_ro_writeable
387 * Reallocates rw->ro if necessary to make it writeable.
388 * Locking: runtimeLock must be held by the caller.
389 **********************************************************************/
390 static class_ro_t *make_ro_writeable(class_rw_t *rw)
391 {
392 runtimeLock.assertLocked();
393
394 if (rw->flags & RW_COPIED_RO) {
395 // already writeable, do nothing
396 } else {
397 rw->ro = rw->ro->duplicate();
398 rw->flags |= RW_COPIED_RO;
399 }
400 return (class_ro_t *)rw->ro;
401 }
402
403
404 /***********************************************************************
405 * dataSegmentsContain
406 * Returns true if the given address lies within a data segment in any
407 * loaded image.
408 **********************************************************************/
409 NEVER_INLINE
410 static bool
411 dataSegmentsContain(Class cls)
412 {
413 uint32_t index;
414 if (objc::dataSegmentsRanges.find((uintptr_t)cls, index)) {
415 // if the class is realized (hence has a class_rw_t),
416 // memorize where we found the range
417 if (cls->isRealized()) {
418 cls->data()->witness = (uint16_t)index;
419 }
420 return true;
421 }
422 return false;
423 }
424
425
426 /***********************************************************************
427 * isKnownClass
428 * Return true if the class is known to the runtime (located within the
429 * shared cache, within the data segment of a loaded image, or has been
430 * allocated with objc_allocateClassPair).
431 *
432 * The result of this operation is cached on the class in a "witness"
433 * value that is cheaply checked in the fastpath.
434 **********************************************************************/
435 ALWAYS_INLINE
436 static bool
437 isKnownClass(Class cls)
438 {
439 if (fastpath(objc::dataSegmentsRanges.contains(cls->data()->witness, (uintptr_t)cls))) {
440 return true;
441 }
442 auto &set = objc::allocatedClasses.get();
443 return set.find(cls) != set.end() || dataSegmentsContain(cls);
444 }
445
446
447 /***********************************************************************
448 * addClassTableEntry
449 * Add a class to the table of all classes. If addMeta is true,
450 * automatically adds the metaclass of the class as well.
451 * Locking: runtimeLock must be held by the caller.
452 **********************************************************************/
453 static void
454 addClassTableEntry(Class cls, bool addMeta = true)
455 {
456 runtimeLock.assertLocked();
457
458 // This class is allowed to be a known class via the shared cache or via
459 // data segments, but it is not allowed to be in the dynamic table already.
460 auto &set = objc::allocatedClasses.get();
461
462 ASSERT(set.find(cls) == set.end());
463
464 if (!isKnownClass(cls))
465 set.insert(cls);
466 if (addMeta)
467 addClassTableEntry(cls->ISA(), false);
468 }
469
470
471 /***********************************************************************
472 * checkIsKnownClass
473 * Checks the given class against the list of all known classes. Dies
474 * with a fatal error if the class is not known.
475 * Locking: runtimeLock must be held by the caller.
476 **********************************************************************/
477 ALWAYS_INLINE
478 static void
479 checkIsKnownClass(Class cls)
480 {
481 if (slowpath(!isKnownClass(cls))) {
482 _objc_fatal("Attempt to use unknown class %p.", cls);
483 }
484 }
485
486 /***********************************************************************
487 * classNSObject
488 * Returns class NSObject.
489 * Locking: none
490 **********************************************************************/
491 static Class classNSObject(void)
492 {
493 extern objc_class OBJC_CLASS_$_NSObject;
494 return (Class)&OBJC_CLASS_$_NSObject;
495 }
496
497 static Class metaclassNSObject(void)
498 {
499 extern objc_class OBJC_METACLASS_$_NSObject;
500 return (Class)&OBJC_METACLASS_$_NSObject;
501 }
502
503 /***********************************************************************
504 * printReplacements
505 * Implementation of PrintReplacedMethods / OBJC_PRINT_REPLACED_METHODS.
506 * Warn about methods from cats that override other methods in cats or cls.
507 * Assumes no methods from cats have been added to cls yet.
508 **********************************************************************/
509 __attribute__((cold, noinline))
510 static void
511 printReplacements(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count)
512 {
513 uint32_t c;
514 bool isMeta = cls->isMetaClass();
515
516 // Newest categories are LAST in cats
517 // Later categories override earlier ones.
518 for (c = 0; c < cats_count; c++) {
519 category_t *cat = cats_list[c].cat;
520
521 method_list_t *mlist = cat->methodsForMeta(isMeta);
522 if (!mlist) continue;
523
524 for (const auto& meth : *mlist) {
525 SEL s = sel_registerName(sel_cname(meth.name));
526
527 // Search for replaced methods in method lookup order.
528 // Complain about the first duplicate only.
529
530 // Look for method in earlier categories
531 for (uint32_t c2 = 0; c2 < c; c2++) {
532 category_t *cat2 = cats_list[c2].cat;
533
534 const method_list_t *mlist2 = cat2->methodsForMeta(isMeta);
535 if (!mlist2) continue;
536
537 for (const auto& meth2 : *mlist2) {
538 SEL s2 = sel_registerName(sel_cname(meth2.name));
539 if (s == s2) {
540 logReplacedMethod(cls->nameForLogging(), s,
541 cls->isMetaClass(), cat->name,
542 meth2.imp, meth.imp);
543 goto complained;
544 }
545 }
546 }
547
548 // Look for method in cls
549 for (const auto& meth2 : cls->data()->methods) {
550 SEL s2 = sel_registerName(sel_cname(meth2.name));
551 if (s == s2) {
552 logReplacedMethod(cls->nameForLogging(), s,
553 cls->isMetaClass(), cat->name,
554 meth2.imp, meth.imp);
555 goto complained;
556 }
557 }
558
559 complained:
560 ;
561 }
562 }
563 }
564
565
566 /***********************************************************************
567 * unreasonableClassCount
568 * Provides an upper bound for any iteration of classes,
569 * to prevent spins when runtime metadata is corrupted.
570 **********************************************************************/
571 static unsigned unreasonableClassCount()
572 {
573 runtimeLock.assertLocked();
574
575 int base = NXCountMapTable(gdb_objc_realized_classes) +
576 getPreoptimizedClassUnreasonableCount();
577
578 // Provide lots of slack here. Some iterations touch metaclasses too.
579 // Some iterations backtrack (like realized class iteration).
580 // We don't need an efficient bound, merely one that prevents spins.
581 return (base + 1) * 16;
582 }
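// For example (illustrative numbers only): with 1,000 classes in
// gdb_objc_realized_classes and 200 preoptimized classes, the bound is
// (1,200 + 1) * 16 = 19,216 iterations before declaring corruption.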
583
584
585 /***********************************************************************
586 * Class enumerators
587 * The passed-in block returns `false` if subclasses can be skipped
588 * Locking: runtimeLock must be held by the caller.
589 **********************************************************************/
590 static inline void
591 foreach_realized_class_and_subclass_2(Class top, unsigned &count,
592 bool skip_metaclass,
593 bool (^code)(Class) __attribute((noescape)))
594 {
595 Class cls = top;
596
597 runtimeLock.assertLocked();
598 ASSERT(top);
599
600 while (1) {
601 if (--count == 0) {
602 _objc_fatal("Memory corruption in class list.");
603 }
604
605 bool skip_subclasses;
606
607 if (skip_metaclass && cls->isMetaClass()) {
608 skip_subclasses = true;
609 } else {
610 skip_subclasses = !code(cls);
611 }
612
613 if (!skip_subclasses && cls->data()->firstSubclass) {
614 cls = cls->data()->firstSubclass;
615 } else {
616 while (!cls->data()->nextSiblingClass && cls != top) {
617 cls = cls->superclass;
618 if (--count == 0) {
619 _objc_fatal("Memory corruption in class list.");
620 }
621 }
622 if (cls == top) break;
623 cls = cls->data()->nextSiblingClass;
624 }
625 }
626 }
627
628 // Enumerates a class and all of its realized subclasses.
629 static void
630 foreach_realized_class_and_subclass(Class top, bool (^code)(Class) __attribute((noescape)))
631 {
632 unsigned int count = unreasonableClassCount();
633
634 foreach_realized_class_and_subclass_2(top, count, false, code);
635 }
636
637 // Enumerates all realized classes and metaclasses.
638 static void
639 foreach_realized_class_and_metaclass(bool (^code)(Class) __attribute((noescape)))
640 {
641 unsigned int count = unreasonableClassCount();
642
643 for (Class top = _firstRealizedClass;
644 top != nil;
645 top = top->data()->nextSiblingClass)
646 {
647 foreach_realized_class_and_subclass_2(top, count, false, code);
648 }
649 }
650
651 // Enumerates all realized classes (ignoring metaclasses).
652 static void
653 foreach_realized_class(bool (^code)(Class) __attribute((noescape)))
654 {
655 unsigned int count = unreasonableClassCount();
656
657 for (Class top = _firstRealizedClass;
658 top != nil;
659 top = top->data()->nextSiblingClass)
660 {
661 foreach_realized_class_and_subclass_2(top, count, true, code);
662 }
663 }
664
665
666 /***********************************************************************
667 * Method Scanners / Optimization tracking
668 * Implementation of scanning for various implementations of methods.
669 **********************************************************************/
670
671 namespace objc {
672
673 enum SelectorBundle {
674 AWZ,
675 RR,
676 Core,
677 };
678
679 namespace scanner {
680
681 // The current state of NSObject swizzling for every scanner
682 //
683 // It allows for cheap checks of global swizzles, and also remembers
684 // things like IMP swizzling that happen before NSObject has been
685 // initialized, which setInitialized() would otherwise miss.
686 //
687 // Every pair of bits describes a SelectorBundle.
688 // even bits: is NSObject class swizzled for this bundle
689 // odd bits: is NSObject meta class swizzled for this bundle
690 static uintptr_t NSObjectSwizzledMask;
691
692 static ALWAYS_INLINE uintptr_t
693 swizzlingBit(SelectorBundle bundle, bool isMeta)
694 {
695 return 1UL << (2 * bundle + isMeta);
696 }
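// For example, swizzlingBit(RR, /*isMeta*/true) == 1 << 3: bit 3 of
// NSObjectSwizzledMask records that the NSObject metaclass has been
// swizzled for the RR bundle (RR == 1, so 2 * 1 + 1 == 3).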
697
698 static void __attribute__((cold, noinline))
699 printCustom(Class cls, SelectorBundle bundle, bool inherited)
700 {
701 static char const * const SelectorBundleName[] = {
702 [AWZ] = "CUSTOM AWZ",
703 [RR] = "CUSTOM RR",
704 [Core] = "CUSTOM Core",
705 };
706
707 _objc_inform("%s: %s%s%s", SelectorBundleName[bundle],
708 cls->nameForLogging(),
709 cls->isMetaClass() ? " (meta)" : "",
710 inherited ? " (inherited)" : "");
711 }
712
713 enum class Scope { Instances, Classes, Both };
714
715 template <typename Traits, SelectorBundle Bundle, bool &ShouldPrint, Scope Domain = Scope::Both>
716 class Mixin {
717
718 // work around compiler being broken with templates using Class/objc_class,
719 // probably some weird confusion with Class being builtin
720 ALWAYS_INLINE static objc_class *as_objc_class(Class cls) {
721 return (objc_class *)cls;
722 }
723
724 static void
725 setCustomRecursively(Class cls, bool inherited = false)
726 {
727 foreach_realized_class_and_subclass(cls, [=](Class c){
728 if (c != cls && !as_objc_class(c)->isInitialized()) {
729 // Subclass not yet initialized. Wait for setInitialized() to do it
730 return false;
731 }
732 if (Traits::isCustom(c)) {
733 return false;
734 }
735 Traits::setCustom(c);
736 if (ShouldPrint) {
737 printCustom(cls, Bundle, inherited || c != cls);
738 }
739 return true;
740 });
741 }
742
743 static bool
744 isNSObjectSwizzled(bool isMeta)
745 {
746 return NSObjectSwizzledMask & swizzlingBit(Bundle, isMeta);
747 }
748
749 static void
750 setNSObjectSwizzled(Class NSOClass, bool isMeta)
751 {
752 NSObjectSwizzledMask |= swizzlingBit(Bundle, isMeta);
753 if (as_objc_class(NSOClass)->isInitialized()) {
754 setCustomRecursively(NSOClass);
755 }
756 }
757
758 static void
759 scanChangedMethodForUnknownClass(const method_t *meth)
760 {
761 Class cls;
762
763 cls = classNSObject();
764 if (Domain != Scope::Classes && !isNSObjectSwizzled(NO)) {
765 for (const auto &meth2: as_objc_class(cls)->data()->methods) {
766 if (meth == &meth2) {
767 setNSObjectSwizzled(cls, NO);
768 break;
769 }
770 }
771 }
772
773 cls = metaclassNSObject();
774 if (Domain != Scope::Instances && !isNSObjectSwizzled(YES)) {
775 for (const auto &meth2: as_objc_class(cls)->data()->methods) {
776 if (meth == &meth2) {
777 setNSObjectSwizzled(cls, YES);
778 break;
779 }
780 }
781 }
782 }
783
784 static void
785 scanAddedClassImpl(Class cls, bool isMeta)
786 {
787 Class NSOClass = (isMeta ? metaclassNSObject() : classNSObject());
788 bool setCustom = NO, inherited = NO;
789
790 if (isNSObjectSwizzled(isMeta)) {
791 setCustom = YES;
792 } else if (cls == NSOClass) {
793 // NSObject is default but we need to check categories
794 auto &methods = as_objc_class(cls)->data()->methods;
795 setCustom = Traits::scanMethodLists(methods.beginCategoryMethodLists(),
796 methods.endCategoryMethodLists(cls));
797 } else if (!isMeta && !as_objc_class(cls)->superclass) {
798 // Custom Root class
799 setCustom = YES;
800 } else if (Traits::isCustom(as_objc_class(cls)->superclass)) {
801 // Superclass is custom, therefore we are too.
802 setCustom = YES;
803 inherited = YES;
804 } else {
805 // Not NSObject.
806 auto &methods = as_objc_class(cls)->data()->methods;
807 setCustom = Traits::scanMethodLists(methods.beginLists(),
808 methods.endLists());
809 }
810 if (slowpath(setCustom)) {
811 if (ShouldPrint) printCustom(cls, Bundle, inherited);
812 } else {
813 Traits::setDefault(cls);
814 }
815 }
816
817 public:
818 // Scan a class that is about to be marked Initialized for particular
819 // bundles of selectors, and mark the class and its children
820 // accordingly.
821 //
822 // This also handles inheriting properties from its superclass.
823 //
824 // Caller: objc_class::setInitialized()
825 static void
826 scanInitializedClass(Class cls, Class metacls)
827 {
828 if (Domain != Scope::Classes) {
829 scanAddedClassImpl(cls, false);
830 }
831 if (Domain != Scope::Instances) {
832 scanAddedClassImpl(metacls, true);
833 }
834 }
835
836 // Inherit various properties from the superclass when a class
837 // is being added to the graph.
838 //
839 // Caller: addSubclass()
840 static void
841 scanAddedSubClass(Class subcls, Class supercls)
842 {
843 if (slowpath(Traits::isCustom(supercls) && !Traits::isCustom(subcls))) {
844 setCustomRecursively(subcls, true);
845 }
846 }
847
848 // Scan Method lists for selectors that would override things
849 // in a Bundle.
850 //
851 // This is used to detect when categories that override problematic
852 // selectors are injected into a class after it has been initialized.
853 //
854 // Caller: prepareMethodLists()
855 static void
856 scanAddedMethodLists(Class cls, method_list_t **mlists, int count)
857 {
858 if (slowpath(Traits::isCustom(cls))) {
859 return;
860 }
861 if (slowpath(Traits::scanMethodLists(mlists, mlists + count))) {
862 setCustomRecursively(cls);
863 }
864 }
865
866 // Handle IMP Swizzling (the IMP for an existing method being changed).
867 //
868 // In almost all cases, IMP swizzling does not affect custom bits.
869 // Custom search will already find the method whether or not
870 // it is swizzled, so it does not transition from non-custom to custom.
871 //
872 // The only case where IMP swizzling can affect the custom bits is
873 // if the swizzled method is one of the methods that is assumed to be
874 // non-custom. These special cases are listed in setInitialized().
875 // We look for such cases here.
876 //
877 // Caller: Swizzling methods via adjustCustomFlagsForMethodChange()
878 static void
879 scanChangedMethod(Class cls, const method_t *meth)
880 {
881 if (fastpath(!Traits::isInterestingSelector(meth->name))) {
882 return;
883 }
884
885 if (cls) {
886 bool isMeta = as_objc_class(cls)->isMetaClass();
887 if (isMeta && Domain != Scope::Instances) {
888 if (cls == metaclassNSObject() && !isNSObjectSwizzled(isMeta)) {
889 setNSObjectSwizzled(cls, isMeta);
890 }
891 }
892 if (!isMeta && Domain != Scope::Classes) {
893 if (cls == classNSObject() && !isNSObjectSwizzled(isMeta)) {
894 setNSObjectSwizzled(cls, isMeta);
895 }
896 }
897 } else {
898 // We're called from method_exchangeImplementations; only the NSObject
899 // class and metaclass may be problematic (exchanging the default
900 // builtin IMP of an interesting selector is a swizzle that may
901 // flip our scanned property; for other classes, the previous
902 // value had already flipped the property).
903 //
904 // However, as we don't know the class, we need to scan all of
905 // NSObject class and metaclass methods (this is SLOW).
906 scanChangedMethodForUnknownClass(meth);
907 }
908 }
909 };
910
911 } // namespace scanner
912
913 // AWZ methods: +alloc / +allocWithZone:
914 struct AWZScanner : scanner::Mixin<AWZScanner, AWZ, PrintCustomAWZ, scanner::Scope::Classes> {
915 static bool isCustom(Class cls) {
916 return cls->hasCustomAWZ();
917 }
918 static void setCustom(Class cls) {
919 cls->setHasCustomAWZ();
920 }
921 static void setDefault(Class cls) {
922 cls->setHasDefaultAWZ();
923 }
924 static bool isInterestingSelector(SEL sel) {
925 return sel == @selector(alloc) || sel == @selector(allocWithZone:);
926 }
927 static bool scanMethodLists(method_list_t **mlists, method_list_t **end) {
928 SEL sels[2] = { @selector(alloc), @selector(allocWithZone:), };
929 return method_lists_contains_any(mlists, end, sels, 2);
930 }
931 };
932
933 // Retain/Release methods that are extremely rarely overridden
934 //
935 // retain/release/autorelease/retainCount/
936 // _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
937 struct RRScanner : scanner::Mixin<RRScanner, RR, PrintCustomRR
938 #if !SUPPORT_NONPOINTER_ISA
939 , scanner::Scope::Instances
940 #endif
941 > {
942 static bool isCustom(Class cls) {
943 return cls->hasCustomRR();
944 }
945 static void setCustom(Class cls) {
946 cls->setHasCustomRR();
947 }
948 static void setDefault(Class cls) {
949 cls->setHasDefaultRR();
950 }
951 static bool isInterestingSelector(SEL sel) {
952 return sel == @selector(retain) ||
953 sel == @selector(release) ||
954 sel == @selector(autorelease) ||
955 sel == @selector(_tryRetain) ||
956 sel == @selector(_isDeallocating) ||
957 sel == @selector(retainCount) ||
958 sel == @selector(allowsWeakReference) ||
959 sel == @selector(retainWeakReference);
960 }
961 static bool scanMethodLists(method_list_t **mlists, method_list_t **end) {
962 SEL sels[8] = {
963 @selector(retain),
964 @selector(release),
965 @selector(autorelease),
966 @selector(_tryRetain),
967 @selector(_isDeallocating),
968 @selector(retainCount),
969 @selector(allowsWeakReference),
970 @selector(retainWeakReference),
971 };
972 return method_lists_contains_any(mlists, end, sels, 8);
973 }
974 };
975
976 // Core NSObject methods that are extremely rarely overridden
977 //
978 // +new, ±class, ±self, ±isKindOfClass:, ±respondsToSelector:
979 struct CoreScanner : scanner::Mixin<CoreScanner, Core, PrintCustomCore> {
980 static bool isCustom(Class cls) {
981 return cls->hasCustomCore();
982 }
983 static void setCustom(Class cls) {
984 cls->setHasCustomCore();
985 }
986 static void setDefault(Class cls) {
987 cls->setHasDefaultCore();
988 }
989 static bool isInterestingSelector(SEL sel) {
990 return sel == @selector(new) ||
991 sel == @selector(self) ||
992 sel == @selector(class) ||
993 sel == @selector(isKindOfClass:) ||
994 sel == @selector(respondsToSelector:);
995 }
996 static bool scanMethodLists(method_list_t **mlists, method_list_t **end) {
997 SEL sels[5] = {
998 @selector(new),
999 @selector(self),
1000 @selector(class),
1001 @selector(isKindOfClass:),
1002 @selector(respondsToSelector:)
1003 };
1004 return method_lists_contains_any(mlists, end, sels, 5);
1005 }
1006 };
1007
1008 class category_list : nocopy_t {
1009 union {
1010 locstamped_category_t lc;
1011 struct {
1012 locstamped_category_t *array;
1013 // this aliases with locstamped_category_t::hi
1014 // which is an aliased pointer
1015 uint32_t is_array : 1;
1016 uint32_t count : 31;
1017 uint32_t size : 32;
1018 };
1019 } _u;
1020
1021 public:
1022 category_list() : _u{{nullptr, nullptr}} { }
1023 category_list(locstamped_category_t lc) : _u{{lc}} { }
1024 category_list(category_list &&other) : category_list() {
1025 std::swap(_u, other._u);
1026 }
1027 ~category_list()
1028 {
1029 if (_u.is_array) {
1030 free(_u.array);
1031 }
1032 }
1033
1034 uint32_t count() const
1035 {
1036 if (_u.is_array) return _u.count;
1037 return _u.lc.cat ? 1 : 0;
1038 }
1039
1040 uint32_t arrayByteSize(uint32_t size) const
1041 {
1042 return sizeof(locstamped_category_t) * size;
1043 }
1044
1045 const locstamped_category_t *array() const
1046 {
1047 return _u.is_array ? _u.array : &_u.lc;
1048 }
1049
1050 void append(locstamped_category_t lc)
1051 {
1052 if (_u.is_array) {
1053 if (_u.count == _u.size) {
1054 // Have a typical malloc growth:
1055 // - size <= 8: grow by 2
1056 // - size <= 16: grow by 4
1057 // - size <= 32: grow by 8
1058 // ... etc
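// Sanity check of the growth step below (assuming fls() returns the
// 1-based index of the highest set bit): starting from a capacity of 2
// the sequence is 2, 4, 6, 8, 12, 16, 24, 32, 48, ...
// (e.g. fls(8) == 4, so a size of 8 grows by 1 << 2 == 4).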
1059 _u.size += _u.size < 8 ? 2 : 1 << (fls(_u.size) - 2);
1060 _u.array = (locstamped_category_t *)reallocf(_u.array, arrayByteSize(_u.size));
1061 }
1062 _u.array[_u.count++] = lc;
1063 } else if (_u.lc.cat == NULL) {
1064 _u.lc = lc;
1065 } else {
1066 locstamped_category_t *arr = (locstamped_category_t *)malloc(arrayByteSize(2));
1067 arr[0] = _u.lc;
1068 arr[1] = lc;
1069
1070 _u.array = arr;
1071 _u.is_array = true;
1072 _u.count = 2;
1073 _u.size = 2;
1074 }
1075 }
1076
1077 void erase(category_t *cat)
1078 {
1079 if (_u.is_array) {
1080 for (int i = 0; i < _u.count; i++) {
1081 if (_u.array[i].cat == cat) {
1082 // shift entries to preserve list order
1083 memmove(&_u.array[i], &_u.array[i+1], arrayByteSize(_u.count - i - 1));
1084 return;
1085 }
1086 }
1087 } else if (_u.lc.cat == cat) {
1088 _u.lc.cat = NULL;
1089 _u.lc.hi = NULL;
1090 }
1091 }
1092 };
1093
1094 class UnattachedCategories : public ExplicitInitDenseMap<Class, category_list>
1095 {
1096 public:
1097 void addForClass(locstamped_category_t lc, Class cls)
1098 {
1099 runtimeLock.assertLocked();
1100
1101 if (slowpath(PrintConnecting)) {
1102 _objc_inform("CLASS: found category %c%s(%s)",
1103 cls->isMetaClass() ? '+' : '-',
1104 cls->nameForLogging(), lc.cat->name);
1105 }
1106
1107 auto result = get().try_emplace(cls, lc);
1108 if (!result.second) {
1109 result.first->second.append(lc);
1110 }
1111 }
1112
1113 void attachToClass(Class cls, Class previously, int flags)
1114 {
1115 runtimeLock.assertLocked();
1116 ASSERT((flags & ATTACH_CLASS) ||
1117 (flags & ATTACH_METACLASS) ||
1118 (flags & ATTACH_CLASS_AND_METACLASS));
1119
1120 auto &map = get();
1121 auto it = map.find(previously);
1122
1123 if (it != map.end()) {
1124 category_list &list = it->second;
1125 if (flags & ATTACH_CLASS_AND_METACLASS) {
1126 int otherFlags = flags & ~ATTACH_CLASS_AND_METACLASS;
1127 attachCategories(cls, list.array(), list.count(), otherFlags | ATTACH_CLASS);
1128 attachCategories(cls->ISA(), list.array(), list.count(), otherFlags | ATTACH_METACLASS);
1129 } else {
1130 attachCategories(cls, list.array(), list.count(), flags);
1131 }
1132 map.erase(it);
1133 }
1134 }
1135
1136 void eraseCategoryForClass(category_t *cat, Class cls)
1137 {
1138 runtimeLock.assertLocked();
1139
1140 auto &map = get();
1141 auto it = map.find(cls);
1142 if (it != map.end()) {
1143 category_list &list = it->second;
1144 list.erase(cat);
1145 if (list.count() == 0) {
1146 map.erase(it);
1147 }
1148 }
1149 }
1150
1151 void eraseClass(Class cls)
1152 {
1153 runtimeLock.assertLocked();
1154
1155 get().erase(cls);
1156 }
1157 };
1158
1159 static UnattachedCategories unattachedCategories;
1160
1161 } // namespace objc
1162
1163 static bool isBundleClass(Class cls)
1164 {
1165 return cls->data()->ro->flags & RO_FROM_BUNDLE;
1166 }
1167
1168
1169 static void
1170 fixupMethodList(method_list_t *mlist, bool bundleCopy, bool sort)
1171 {
1172 runtimeLock.assertLocked();
1173 ASSERT(!mlist->isFixedUp());
1174
1175 // fixme lock less in attachMethodLists ?
1176 // dyld3 may have already uniqued, but not sorted, the list
1177 if (!mlist->isUniqued()) {
1178 mutex_locker_t lock(selLock);
1179
1180 // Unique selectors in list.
1181 for (auto& meth : *mlist) {
1182 const char *name = sel_cname(meth.name);
1183 meth.name = sel_registerNameNoLock(name, bundleCopy);
1184 }
1185 }
1186
1187 // Sort by selector address.
1188 if (sort) {
1189 method_t::SortBySELAddress sorter;
1190 std::stable_sort(mlist->begin(), mlist->end(), sorter);
1191 }
1192
1193 // Mark method list as uniqued and sorted
1194 mlist->setFixedUp();
1195 }
1196
1197
1198 static void
1199 prepareMethodLists(Class cls, method_list_t **addedLists, int addedCount,
1200 bool baseMethods, bool methodsFromBundle)
1201 {
1202 runtimeLock.assertLocked();
1203
1204 if (addedCount == 0) return;
1205
1206 // There exist RR/AWZ/Core special cases for some classes' base methods.
1207 // But this code should never need to scan base methods for RR/AWZ/Core:
1208 // default RR/AWZ/Core cannot be set before setInitialized().
1209 // Therefore we need not handle any special cases here.
1210 if (baseMethods) {
1211 ASSERT(cls->hasCustomAWZ() && cls->hasCustomRR() && cls->hasCustomCore());
1212 }
1213
1214 // Add method lists to array.
1215 // Reallocate un-fixed method lists.
1216 // The new methods are PREPENDED to the method list array.
1217
1218 for (int i = 0; i < addedCount; i++) {
1219 method_list_t *mlist = addedLists[i];
1220 ASSERT(mlist);
1221
1222 // Fixup selectors if necessary
1223 if (!mlist->isFixedUp()) {
1224 fixupMethodList(mlist, methodsFromBundle, true/*sort*/);
1225 }
1226 }
1227
1228 // If the class is initialized, then scan for method implementations
1229 // tracked by the class's flags. If it's not initialized yet,
1230 // then objc_class::setInitialized() will take care of it.
1231 if (cls->isInitialized()) {
1232 objc::AWZScanner::scanAddedMethodLists(cls, addedLists, addedCount);
1233 objc::RRScanner::scanAddedMethodLists(cls, addedLists, addedCount);
1234 objc::CoreScanner::scanAddedMethodLists(cls, addedLists, addedCount);
1235 }
1236 }
1237
1238
1239 // Attach method lists and properties and protocols from categories to a class.
1240 // Assumes the categories in cats are all loaded and sorted by load order,
1241 // oldest categories first.
1242 static void
1243 attachCategories(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count,
1244 int flags)
1245 {
1246 if (slowpath(PrintReplacedMethods)) {
1247 printReplacements(cls, cats_list, cats_count);
1248 }
1249 if (slowpath(PrintConnecting)) {
1250 _objc_inform("CLASS: attaching %d categories to%s class '%s'%s",
1251 cats_count, (flags & ATTACH_EXISTING) ? " existing" : "",
1252 cls->nameForLogging(), (flags & ATTACH_METACLASS) ? " (meta)" : "");
1253 }
1254
1255 /*
1256 * Only a few classes have more than 64 categories during launch.
1257 * This uses a little stack, and avoids malloc.
1258 *
1259 * Categories must be added in the proper order, which is back
1260 * to front. To do that with the chunking, we iterate cats_list
1261 * from front to back, build up the local buffers backwards,
1262 * and call attachLists on the chunks. attachLists prepends the
1263 * lists, so the final result is in the expected order.
1264 */
1265 constexpr uint32_t ATTACH_BUFSIZ = 64;
1266 method_list_t *mlists[ATTACH_BUFSIZ];
1267 property_list_t *proplists[ATTACH_BUFSIZ];
1268 protocol_list_t *protolists[ATTACH_BUFSIZ];
1269
1270 uint32_t mcount = 0;
1271 uint32_t propcount = 0;
1272 uint32_t protocount = 0;
1273 bool fromBundle = NO;
1274 bool isMeta = (flags & ATTACH_METACLASS);
1275 auto rw = cls->data();
1276
1277 for (uint32_t i = 0; i < cats_count; i++) {
1278 auto& entry = cats_list[i];
1279
1280 method_list_t *mlist = entry.cat->methodsForMeta(isMeta);
1281 if (mlist) {
1282 if (mcount == ATTACH_BUFSIZ) {
1283 prepareMethodLists(cls, mlists, mcount, NO, fromBundle);
1284 rw->methods.attachLists(mlists, mcount);
1285 mcount = 0;
1286 }
1287 mlists[ATTACH_BUFSIZ - ++mcount] = mlist;
1288 fromBundle |= entry.hi->isBundle();
1289 }
1290
1291 property_list_t *proplist =
1292 entry.cat->propertiesForMeta(isMeta, entry.hi);
1293 if (proplist) {
1294 if (propcount == ATTACH_BUFSIZ) {
1295 rw->properties.attachLists(proplists, propcount);
1296 propcount = 0;
1297 }
1298 proplists[ATTACH_BUFSIZ - ++propcount] = proplist;
1299 }
1300
1301 protocol_list_t *protolist = entry.cat->protocolsForMeta(isMeta);
1302 if (protolist) {
1303 if (protocount == ATTACH_BUFSIZ) {
1304 rw->protocols.attachLists(protolists, protocount);
1305 protocount = 0;
1306 }
1307 protolists[ATTACH_BUFSIZ - ++protocount] = protolist;
1308 }
1309 }
1310
1311 if (mcount > 0) {
1312 prepareMethodLists(cls, mlists + ATTACH_BUFSIZ - mcount, mcount, NO, fromBundle);
1313 rw->methods.attachLists(mlists + ATTACH_BUFSIZ - mcount, mcount);
1314 if (flags & ATTACH_EXISTING) flushCaches(cls);
1315 }
1316
1317 rw->properties.attachLists(proplists + ATTACH_BUFSIZ - propcount, propcount);
1318
1319 rw->protocols.attachLists(protolists + ATTACH_BUFSIZ - protocount, protocount);
1320 }
1321
1322
1323 /***********************************************************************
1324 * methodizeClass
1325 * Fixes up cls's method list, protocol list, and property list.
1326 * Attaches any outstanding categories.
1327 * Locking: runtimeLock must be held by the caller
1328 **********************************************************************/
1329 static void methodizeClass(Class cls, Class previously)
1330 {
1331 runtimeLock.assertLocked();
1332
1333 bool isMeta = cls->isMetaClass();
1334 auto rw = cls->data();
1335 auto ro = rw->ro;
1336
1337 // Methodizing for the first time
1338 if (PrintConnecting) {
1339 _objc_inform("CLASS: methodizing class '%s' %s",
1340 cls->nameForLogging(), isMeta ? "(meta)" : "");
1341 }
1342
1343 // Install methods and properties that the class implements itself.
1344 method_list_t *list = ro->baseMethods();
1345 if (list) {
1346 prepareMethodLists(cls, &list, 1, YES, isBundleClass(cls));
1347 rw->methods.attachLists(&list, 1);
1348 }
1349
1350 property_list_t *proplist = ro->baseProperties;
1351 if (proplist) {
1352 rw->properties.attachLists(&proplist, 1);
1353 }
1354
1355 protocol_list_t *protolist = ro->baseProtocols;
1356 if (protolist) {
1357 rw->protocols.attachLists(&protolist, 1);
1358 }
1359
1360 // Root classes get bonus method implementations if they don't have
1361 // them already. These apply before category replacements.
1362 if (cls->isRootMetaclass()) {
1363 // root metaclass
1364 addMethod(cls, @selector(initialize), (IMP)&objc_noop_imp, "", NO);
1365 }
1366
1367 // Attach categories.
1368 if (previously) {
1369 if (isMeta) {
1370 objc::unattachedCategories.attachToClass(cls, previously,
1371 ATTACH_METACLASS);
1372 } else {
1373 // When a class relocates, categories with class methods
1374 // may be registered on the class itself rather than on
1375 // the metaclass. Tell attachToClass to look for those.
1376 objc::unattachedCategories.attachToClass(cls, previously,
1377 ATTACH_CLASS_AND_METACLASS);
1378 }
1379 }
1380 objc::unattachedCategories.attachToClass(cls, cls,
1381 isMeta ? ATTACH_METACLASS : ATTACH_CLASS);
1382
1383 #if DEBUG
1384 // Debug: sanity-check all SELs; log method list contents
1385 for (const auto& meth : rw->methods) {
1386 if (PrintConnecting) {
1387 _objc_inform("METHOD %c[%s %s]", isMeta ? '+' : '-',
1388 cls->nameForLogging(), sel_getName(meth.name));
1389 }
1390 ASSERT(sel_registerName(sel_getName(meth.name)) == meth.name);
1391 }
1392 #endif
1393 }
1394
1395
1396 /***********************************************************************
1397 * nonMetaClasses
1398 * Returns the secondary metaclass => class map
1399 * Used for some cases of +initialize and +resolveClassMethod:.
1400 * This map does not contain all class and metaclass pairs. It only
1401 * contains metaclasses whose classes would be in the runtime-allocated
1402 * named-class table, but are not because some other class with the same name
1403 * is in that table.
1404 * Classes with no duplicates are not included.
1405 * Classes in the preoptimized named-class table are not included.
1406 * Classes whose duplicates are in the preoptimized table are not included.
1407 * Most code should use getMaybeUnrealizedNonMetaClass()
1408 * instead of reading this table.
1409 * Locking: runtimeLock must be read- or write-locked by the caller
1410 **********************************************************************/
1411 static NXMapTable *nonmeta_class_map = nil;
1412 static NXMapTable *nonMetaClasses(void)
1413 {
1414 runtimeLock.assertLocked();
1415
1416 if (nonmeta_class_map) return nonmeta_class_map;
1417
1418 // nonmeta_class_map is typically small
1419 INIT_ONCE_PTR(nonmeta_class_map,
1420 NXCreateMapTable(NXPtrValueMapPrototype, 32),
1421 NXFreeMapTable(v));
1422
1423 return nonmeta_class_map;
1424 }
1425
1426
1427 /***********************************************************************
1428 * addNonMetaClass
1429 * Adds metacls => cls to the secondary metaclass map
1430 * Locking: runtimeLock must be held by the caller
1431 **********************************************************************/
1432 static void addNonMetaClass(Class cls)
1433 {
1434 runtimeLock.assertLocked();
1435 void *old;
1436 old = NXMapInsert(nonMetaClasses(), cls->ISA(), cls);
1437
1438 ASSERT(!cls->isMetaClassMaybeUnrealized());
1439 ASSERT(cls->ISA()->isMetaClassMaybeUnrealized());
1440 ASSERT(!old);
1441 }
1442
1443
1444 static void removeNonMetaClass(Class cls)
1445 {
1446 runtimeLock.assertLocked();
1447 NXMapRemove(nonMetaClasses(), cls->ISA());
1448 }
1449
1450
1451 static bool scanMangledField(const char *&string, const char *end,
1452 const char *&field, int& length)
1453 {
1454 // Leading zero not allowed.
1455 if (*string == '0') return false;
1456
1457 length = 0;
1458 field = string;
1459 while (field < end) {
1460 char c = *field;
1461 if (!isdigit(c)) break;
1462 field++;
1463 if (__builtin_smul_overflow(length, 10, &length)) return false;
1464 if (__builtin_sadd_overflow(length, c - '0', &length)) return false;
1465 }
1466
1467 string = field + length;
1468 return length > 0 && string <= end;
1469 }
1470
1471
1472 /***********************************************************************
1473 * copySwiftV1DemangledName
1474 * Returns the pretty form of the given Swift-v1-mangled class or protocol name.
1475 * Returns nil if the string doesn't look like a mangled Swift v1 name.
1476 * The result must be freed with free().
1477 **********************************************************************/
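// For example (hypothetical names): "_TtC5MyApp7MyClass" demangles to
// "MyApp.MyClass", "_TtCs6Object" to "Swift.Object", and with
// isProtocol == true, "_TtP5MyApp5Greet_" demangles to "MyApp.Greet".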
1478 static char *copySwiftV1DemangledName(const char *string, bool isProtocol = false)
1479 {
1480 if (!string) return nil;
1481
1482 // Swift mangling prefix.
1483 if (strncmp(string, isProtocol ? "_TtP" : "_TtC", 4) != 0) return nil;
1484 string += 4;
1485
1486 const char *end = string + strlen(string);
1487
1488 // Module name.
1489 const char *prefix;
1490 int prefixLength;
1491 if (string[0] == 's') {
1492 // "s" is the Swift module.
1493 prefix = "Swift";
1494 prefixLength = 5;
1495 string += 1;
1496 } else {
1497 if (! scanMangledField(string, end, prefix, prefixLength)) return nil;
1498 }
1499
1500 // Class or protocol name.
1501 const char *suffix;
1502 int suffixLength;
1503 if (! scanMangledField(string, end, suffix, suffixLength)) return nil;
1504
1505 if (isProtocol) {
1506 // Remainder must be "_".
1507 if (strcmp(string, "_") != 0) return nil;
1508 } else {
1509 // Remainder must be empty.
1510 if (string != end) return nil;
1511 }
1512
1513 char *result;
1514 asprintf(&result, "%.*s.%.*s", prefixLength,prefix, suffixLength,suffix);
1515 return result;
1516 }
1517
1518
1519 /***********************************************************************
1520 * copySwiftV1MangledName
1521 * Returns the Swift 1.0 mangled form of the given class or protocol name.
1522 * Returns nil if the string doesn't look like an unmangled Swift name.
1523 * The result must be freed with free().
1524 **********************************************************************/
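// For example (hypothetical names): "MyApp.MyClass" mangles to
// "_TtC5MyApp7MyClass", "Swift.Object" to "_TtCs6Object", and with
// isProtocol == true, "MyApp.Greet" mangles to "_TtP5MyApp5Greet_".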
1525 static char *copySwiftV1MangledName(const char *string, bool isProtocol = false)
1526 {
1527 if (!string) return nil;
1528
1529 size_t dotCount = 0;
1530 size_t dotIndex;
1531 const char *s;
1532 for (s = string; *s; s++) {
1533 if (*s == '.') {
1534 dotCount++;
1535 dotIndex = s - string;
1536 }
1537 }
1538 size_t stringLength = s - string;
1539
1540 if (dotCount != 1 || dotIndex == 0 || dotIndex >= stringLength-1) {
1541 return nil;
1542 }
1543
1544 const char *prefix = string;
1545 size_t prefixLength = dotIndex;
1546 const char *suffix = string + dotIndex + 1;
1547 size_t suffixLength = stringLength - (dotIndex + 1);
1548
1549 char *name;
1550
1551 if (prefixLength == 5 && memcmp(prefix, "Swift", 5) == 0) {
1552 asprintf(&name, "_Tt%cs%zu%.*s%s",
1553 isProtocol ? 'P' : 'C',
1554 suffixLength, (int)suffixLength, suffix,
1555 isProtocol ? "_" : "");
1556 } else {
1557 asprintf(&name, "_Tt%c%zu%.*s%zu%.*s%s",
1558 isProtocol ? 'P' : 'C',
1559 prefixLength, (int)prefixLength, prefix,
1560 suffixLength, (int)suffixLength, suffix,
1561 isProtocol ? "_" : "");
1562 }
1563 return name;
1564 }
1565
1566
1567 /***********************************************************************
1568 * getClassExceptSomeSwift
1569 * Looks up a class by name. The class MIGHT NOT be realized.
1570 * Demangled Swift names are recognized.
1571 * Classes known to the Swift runtime but not yet used are NOT recognized.
1572 * (such as subclasses of un-instantiated generics)
1573 * Use look_up_class() to find them as well.
1574 * Locking: runtimeLock must be read- or write-locked by the caller.
1575 **********************************************************************/
1576
1577 // This is a misnomer: gdb_objc_realized_classes is actually a list of
1578 // named classes not in the dyld shared cache, whether realized or not.
1579 NXMapTable *gdb_objc_realized_classes; // exported for debuggers in objc-gdb.h
1580 uintptr_t objc_debug_realized_class_generation_count;
1581
1582 static Class getClass_impl(const char *name)
1583 {
1584 runtimeLock.assertLocked();
1585
1586 // allocated in _read_images
1587 ASSERT(gdb_objc_realized_classes);
1588
1589 // Try runtime-allocated table
1590 Class result = (Class)NXMapGet(gdb_objc_realized_classes, name);
1591 if (result) return result;
1592
1593 // Try table from dyld shared cache.
1594 // Note we do this last to handle the case where we dlopen'ed a shared cache
1595 // dylib with duplicates of classes already present in the main executable.
1596 // In that case, we put the class from the main executable in
1597 // gdb_objc_realized_classes and want to check that before considering any
1598 // newly loaded shared cache binaries.
1599 return getPreoptimizedClass(name);
1600 }
1601
1602 static Class getClassExceptSomeSwift(const char *name)
1603 {
1604 runtimeLock.assertLocked();
1605
1606 // Try name as-is
1607 Class result = getClass_impl(name);
1608 if (result) return result;
1609
1610 // Try Swift-mangled equivalent of the given name.
1611 if (char *swName = copySwiftV1MangledName(name)) {
1612 result = getClass_impl(swName);
1613 free(swName);
1614 return result;
1615 }
1616
1617 return nil;
1618 }
1619
1620
1621 /***********************************************************************
1622 * addNamedClass
1623 * Adds name => cls to the named non-meta class map.
1624 * Warns about duplicate class names and keeps the old mapping.
1625 * Locking: runtimeLock must be held by the caller
1626 **********************************************************************/
1627 static void addNamedClass(Class cls, const char *name, Class replacing = nil)
1628 {
1629 runtimeLock.assertLocked();
1630 Class old;
1631 if ((old = getClassExceptSomeSwift(name)) && old != replacing) {
1632 inform_duplicate(name, old, cls);
1633
1634 // getMaybeUnrealizedNonMetaClass uses name lookups.
1635 // Classes not found by name lookup must be in the
1636 // secondary meta->nonmeta table.
1637 addNonMetaClass(cls);
1638 } else {
1639 NXMapInsert(gdb_objc_realized_classes, name, cls);
1640 }
1641 ASSERT(!(cls->data()->flags & RO_META));
1642
1643 // wrong: constructed classes are already realized when they get here
1644 // ASSERT(!cls->isRealized());
1645 }
1646
1647
1648 /***********************************************************************
1649 * removeNamedClass
1650 * Removes cls from the name => cls map.
1651 * Locking: runtimeLock must be held by the caller
1652 **********************************************************************/
1653 static void removeNamedClass(Class cls, const char *name)
1654 {
1655 runtimeLock.assertLocked();
1656 ASSERT(!(cls->data()->flags & RO_META));
1657 if (cls == NXMapGet(gdb_objc_realized_classes, name)) {
1658 NXMapRemove(gdb_objc_realized_classes, name);
1659 } else {
1660 // cls has a name collision with another class - don't remove the other
1661 // but do remove cls from the secondary metaclass->class map.
1662 removeNonMetaClass(cls);
1663 }
1664 }
1665
1666
1667 /***********************************************************************
1668 * futureNamedClasses
1669 * Returns the classname => future class map for unrealized future classes.
1670 * Locking: runtimeLock must be held by the caller
1671 **********************************************************************/
1672 static NXMapTable *future_named_class_map = nil;
1673 static NXMapTable *futureNamedClasses()
1674 {
1675 runtimeLock.assertLocked();
1676
1677 if (future_named_class_map) return future_named_class_map;
1678
1679 // future_named_class_map is big enough for CF's classes and a few others
1680 future_named_class_map =
1681 NXCreateMapTable(NXStrValueMapPrototype, 32);
1682
1683 return future_named_class_map;
1684 }
1685
1686
1687 static bool haveFutureNamedClasses() {
1688 return future_named_class_map && NXCountMapTable(future_named_class_map);
1689 }
1690
1691
1692 /***********************************************************************
1693 * addFutureNamedClass
1694 * Installs cls as the class structure to use for the named class if it appears.
1695 * Locking: runtimeLock must be held by the caller
1696 **********************************************************************/
1697 static void addFutureNamedClass(const char *name, Class cls)
1698 {
1699 void *old;
1700
1701 runtimeLock.assertLocked();
1702
1703 if (PrintFuture) {
1704 _objc_inform("FUTURE: reserving %p for %s", (void*)cls, name);
1705 }
1706
1707 class_rw_t *rw = (class_rw_t *)calloc(sizeof(class_rw_t), 1);
1708 class_ro_t *ro = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
1709 ro->name = strdupIfMutable(name);
1710 rw->ro = ro;
1711 cls->setData(rw);
1712 cls->data()->flags = RO_FUTURE;
1713
1714 old = NXMapKeyCopyingInsert(futureNamedClasses(), name, cls);
1715 ASSERT(!old);
1716 }
1717
1718
1719 /***********************************************************************
1720 * popFutureNamedClass
1721 * Removes the named class from the unrealized future class list,
1722 * because it has been realized.
1723 * Returns nil if the name is not used by a future class.
1724 * Locking: runtimeLock must be held by the caller
1725 **********************************************************************/
1726 static Class popFutureNamedClass(const char *name)
1727 {
1728 runtimeLock.assertLocked();
1729
1730 Class cls = nil;
1731
1732 if (future_named_class_map) {
1733 cls = (Class)NXMapKeyFreeingRemove(future_named_class_map, name);
1734 if (cls && NXCountMapTable(future_named_class_map) == 0) {
1735 NXFreeMapTable(future_named_class_map);
1736 future_named_class_map = nil;
1737 }
1738 }
1739
1740 return cls;
1741 }
1742
1743
1744 /***********************************************************************
1745 * remappedClasses
1746 * Returns the oldClass => newClass map for realized future classes.
1747 * Returns the oldClass => nil map for ignored weak-linked classes.
1748 * Locking: runtimeLock must be read- or write-locked by the caller
1749 **********************************************************************/
1750 static objc::DenseMap<Class, Class> *remappedClasses(bool create)
1751 {
1752 static objc::LazyInitDenseMap<Class, Class> remapped_class_map;
1753
1754 runtimeLock.assertLocked();
1755
1756 // start big enough to hold CF's classes and a few others
1757 return remapped_class_map.get(create, 32);
1758 }
1759
1760
1761 /***********************************************************************
1762 * noClassesRemapped
1763 * Returns YES if no classes have been remapped
1764 * Locking: runtimeLock must be read- or write-locked by the caller
1765 **********************************************************************/
1766 static bool noClassesRemapped(void)
1767 {
1768 runtimeLock.assertLocked();
1769
1770 bool result = (remappedClasses(NO) == nil);
1771 #if DEBUG
1772 // Catch construction of an empty table, which defeats optimization.
1773 auto *map = remappedClasses(NO);
1774 if (map) ASSERT(map->size() > 0);
1775 #endif
1776 return result;
1777 }
1778
1779
1780 /***********************************************************************
1781 * addRemappedClass
1782 * newcls is a realized future class, replacing oldcls.
1783 * OR newcls is nil, replacing ignored weak-linked class oldcls.
1784 * Locking: runtimeLock must be write-locked by the caller
1785 **********************************************************************/
1786 static void addRemappedClass(Class oldcls, Class newcls)
1787 {
1788 runtimeLock.assertLocked();
1789
1790 if (PrintFuture) {
1791 _objc_inform("FUTURE: using %p instead of %p for %s",
1792 (void*)newcls, (void*)oldcls, oldcls->nameForLogging());
1793 }
1794
1795 auto result = remappedClasses(YES)->insert({ oldcls, newcls });
1796 #if DEBUG
1797 if (!std::get<1>(result)) {
1798 // An existing mapping was overwritten. This is not allowed
1799 // unless it was to nil.
1800 auto iterator = std::get<0>(result);
1801 auto value = std::get<1>(*iterator);
1802 ASSERT(value == nil);
1803 }
1804 #else
1805 (void)result;
1806 #endif
1807 }
1808
1809
1810 /***********************************************************************
1811 * remapClass
1812 * Returns the live class pointer for cls, which may be pointing to
1813 * a class struct that has been reallocated.
1814 * Returns nil if cls is ignored because of weak linking.
1815 * Locking: runtimeLock must be read- or write-locked by the caller
1816 **********************************************************************/
1817 static Class remapClass(Class cls)
1818 {
1819 runtimeLock.assertLocked();
1820
1821 if (!cls) return nil;
1822
1823 auto *map = remappedClasses(NO);
1824 if (!map)
1825 return cls;
1826
1827 auto iterator = map->find(cls);
1828 if (iterator == map->end())
1829 return cls;
1830 return std::get<1>(*iterator);
1831 }
1832
1833 static Class remapClass(classref_t cls)
1834 {
1835 return remapClass((Class)cls);
1836 }
1837
1838 Class _class_remap(Class cls)
1839 {
1840 mutex_locker_t lock(runtimeLock);
1841 return remapClass(cls);
1842 }
1843
1844 /***********************************************************************
1845 * remapClassRef
1846 * Fix up a class ref, in case the class referenced has been reallocated
1847 * or is an ignored weak-linked class.
1848 * Locking: runtimeLock must be read- or write-locked by the caller
1849 **********************************************************************/
1850 static void remapClassRef(Class *clsref)
1851 {
1852 runtimeLock.assertLocked();
1853
1854 Class newcls = remapClass(*clsref);
1855 if (*clsref != newcls) *clsref = newcls;
1856 }
1857
1858
1859 _Nullable Class
1860 objc_loadClassref(_Nullable Class * _Nonnull clsref)
1861 {
1862 auto *atomicClsref = explicit_atomic<uintptr_t>::from_pointer((uintptr_t *)clsref);
1863
1864 uintptr_t cls = atomicClsref->load(std::memory_order_relaxed);
1865 if (fastpath((cls & 1) == 0))
1866 return (Class)cls;
1867
1868 auto stub = (stub_class_t *)(cls & ~1ULL);
1869 Class initialized = stub->initializer((Class)stub, nil);
1870 atomicClsref->store((uintptr_t)initialized, std::memory_order_relaxed);
1871 return initialized;
1872 }
1873
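// Usage sketch (hypothetical helper): a compiler-emitted classref slot may hold
// a stub pointer with its low bit set. Such slots must be resolved through
// objc_loadClassref() above, which also rewrites the slot so that later loads
// take the fast path.
static Class exampleResolveClassrefSlot(Class *slot)
{
    return objc_loadClassref(slot);
}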
1874
1875 /***********************************************************************
1876 * getMaybeUnrealizedNonMetaClass
1877 * Return the ordinary class for this class or metaclass.
1878 * `inst` is an instance of `cls` or a subclass thereof, or nil.
1879 * Non-nil inst is faster.
1880 * The result may be unrealized.
1881 * Used by +initialize.
1882 * Locking: runtimeLock must be read- or write-locked by the caller
1883 **********************************************************************/
1884 static Class getMaybeUnrealizedNonMetaClass(Class metacls, id inst)
1885 {
1886 static int total, named, secondary, sharedcache, dyld3;
1887 runtimeLock.assertLocked();
1888 ASSERT(metacls->isRealized());
1889
1890 total++;
1891
1892 // return cls itself if it's already a non-meta class
1893 if (!metacls->isMetaClass()) return metacls;
1894
1895 // metacls really is a metaclass
1896 // which means inst (if any) is a class
1897
1898 // special case for root metaclass
1899 // where inst == inst->ISA() == metacls is possible
1900 if (metacls->ISA() == metacls) {
1901 Class cls = metacls->superclass;
1902 ASSERT(cls->isRealized());
1903 ASSERT(!cls->isMetaClass());
1904 ASSERT(cls->ISA() == metacls);
1905 if (cls->ISA() == metacls) return cls;
1906 }
1907
1908 // use inst if available
1909 if (inst) {
1910 Class cls = remapClass((Class)inst);
1911 // cls may be a subclass - find the real class for metacls
1912 // fixme this probably stops working once Swift starts
1913 // reallocating classes if cls is unrealized.
1914 while (cls) {
1915 if (cls->ISA() == metacls) {
1916 ASSERT(!cls->isMetaClassMaybeUnrealized());
1917 return cls;
1918 }
1919 cls = cls->superclass;
1920 }
1921 #if DEBUG
1922 _objc_fatal("cls is not an instance of metacls");
1923 #else
1924 // release build: be forgiving and fall through to slow lookups
1925 #endif
1926 }
1927
1928 // try name lookup
1929 {
1930 Class cls = getClassExceptSomeSwift(metacls->mangledName());
1931 if (cls && cls->ISA() == metacls) {
1932 named++;
1933 if (PrintInitializing) {
1934 _objc_inform("INITIALIZE: %d/%d (%g%%) "
1935 "successful by-name metaclass lookups",
1936 named, total, named*100.0/total);
1937 }
1938 return cls;
1939 }
1940 }
1941
1942 // try secondary table
1943 {
1944 Class cls = (Class)NXMapGet(nonMetaClasses(), metacls);
1945 if (cls) {
1946 secondary++;
1947 if (PrintInitializing) {
1948 _objc_inform("INITIALIZE: %d/%d (%g%%) "
1949 "successful secondary metaclass lookups",
1950 secondary, total, secondary*100.0/total);
1951 }
1952
1953 ASSERT(cls->ISA() == metacls);
1954 return cls;
1955 }
1956 }
1957
1958 // try the dyld closure table
1959 if (isPreoptimized())
1960 {
1961 // Try the table from the dyld closure first. It was built to ignore the
1962 // duplicates it knows will come from the shared cache, so anything left in
1963 // it was present when we launched.
1964 Class cls = nil;
1965 // Note: we have to pass the lambda directly here; otherwise we would try
1966 // to message copy and autorelease it as a block.
1967 _dyld_for_each_objc_class(metacls->mangledName(),
1968 [&cls, metacls](void* classPtr, bool isLoaded, bool* stop) {
1969 // Skip images which aren't loaded. This supports the case where dyld
1970 // might soft link an image from the main binary, so it's possibly not
1971 // loaded yet.
1972 if (!isLoaded)
1973 return;
1974
1975 // Found a loaded image with this class name, so check if it's the right one
1976 Class result = (Class)classPtr;
1977 if (result->ISA() == metacls) {
1978 cls = result;
1979 *stop = true;
1980 }
1981 });
1982
1983 if (cls) {
1984 dyld3++;
1985 if (PrintInitializing) {
1986 _objc_inform("INITIALIZE: %d/%d (%g%%) "
1987 "successful dyld closure metaclass lookups",
1988 dyld3, total, dyld3*100.0/total);
1989 }
1990
1991 return cls;
1992 }
1993 }
1994
1995 // try any duplicates in the dyld shared cache
1996 {
1997 Class cls = nil;
1998
1999 int count;
2000 Class *classes = copyPreoptimizedClasses(metacls->mangledName(),&count);
2001 if (classes) {
2002 for (int i = 0; i < count; i++) {
2003 if (classes[i]->ISA() == metacls) {
2004 cls = classes[i];
2005 break;
2006 }
2007 }
2008 free(classes);
2009 }
2010
2011 if (cls) {
2012 sharedcache++;
2013 if (PrintInitializing) {
2014 _objc_inform("INITIALIZE: %d/%d (%g%%) "
2015 "successful shared cache metaclass lookups",
2016 sharedcache, total, sharedcache*100.0/total);
2017 }
2018
2019 return cls;
2020 }
2021 }
2022
2023 _objc_fatal("no class for metaclass %p", (void*)metacls);
2024 }
2025
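// Usage sketch (hypothetical helper): outside the runtime there is no direct
// metaclass => class API, so the usual approximation mirrors the "try name
// lookup" step above; a metaclass shares its class's name.
static Class exampleClassForMetaclass(Class metacls)
{
    Class cls = objc_getClass(class_getName(metacls));
    return (cls && object_getClass((id)cls) == metacls) ? cls : nil;
}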
2026
2027 /***********************************************************************
2028 * class_initialize. Send the '+initialize' message on demand to any
2029 * uninitialized class. Force initialization of superclasses first.
2030 * inst is an instance of cls, or nil. Non-nil is better for performance.
2031 * Returns the class pointer. If the class was unrealized then
2032 * it may be reallocated.
2033 * Locking:
2034 * runtimeLock must be held by the caller
2035 * This function may drop the lock.
2036 * On exit the lock is re-acquired or dropped as requested by leaveLocked.
2037 **********************************************************************/
2038 static Class initializeAndMaybeRelock(Class cls, id inst,
2039 mutex_t& lock, bool leaveLocked)
2040 {
2041 lock.assertLocked();
2042 ASSERT(cls->isRealized());
2043
2044 if (cls->isInitialized()) {
2045 if (!leaveLocked) lock.unlock();
2046 return cls;
2047 }
2048
2049 // Find the non-meta class for cls, if it is not already one.
2050 // The +initialize message is sent to the non-meta class object.
2051 Class nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst);
2052
2053 // Realize the non-meta class if necessary.
2054 if (nonmeta->isRealized()) {
2055 // nonmeta is cls, which was already realized
2056 // OR nonmeta is distinct, but is already realized
2057 // - nothing else to do
2058 lock.unlock();
2059 } else {
2060 nonmeta = realizeClassMaybeSwiftAndUnlock(nonmeta, lock);
2061 // runtimeLock is now unlocked
2062 // fixme Swift can't relocate the class today,
2063 // but someday it will:
2064 cls = object_getClass(nonmeta);
2065 }
2066
2067 // runtimeLock is now unlocked, for +initialize dispatch
2068 ASSERT(nonmeta->isRealized());
2069 initializeNonMetaClass(nonmeta);
2070
2071 if (leaveLocked) runtimeLock.lock();
2072 return cls;
2073 }
2074
2075 // Locking: acquires runtimeLock
2076 Class class_initialize(Class cls, id obj)
2077 {
2078 runtimeLock.lock();
2079 return initializeAndMaybeRelock(cls, obj, runtimeLock, false);
2080 }
2081
2082 // Locking: caller must hold runtimeLock; this may drop and re-acquire it
2083 static Class initializeAndLeaveLocked(Class cls, id obj, mutex_t& lock)
2084 {
2085 return initializeAndMaybeRelock(cls, obj, lock, true);
2086 }
2087
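// Usage sketch (hypothetical helper and class name): +initialize is sent
// lazily by the dispatch machinery through the functions above. Any first
// message forces it, superclasses first, for example:
static void exampleForceInitialize(void)
{
    Class cls = objc_getClass("ExampleClass");
    if (cls) {
        // Sending +self is a cheap way to trigger +initialize.
        ((id (*)(id, SEL))objc_msgSend)((id)cls, @selector(self));
    }
}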
2088
2089 /***********************************************************************
2090 * addRootClass
2091 * Adds cls as a new realized root class.
2092 * Locking: runtimeLock must be held by the caller.
2093 **********************************************************************/
2094 static void addRootClass(Class cls)
2095 {
2096 runtimeLock.assertLocked();
2097
2098 ASSERT(cls->isRealized());
2099
2100 objc_debug_realized_class_generation_count++;
2101
2102 cls->data()->nextSiblingClass = _firstRealizedClass;
2103 _firstRealizedClass = cls;
2104 }
2105
2106 static void removeRootClass(Class cls)
2107 {
2108 runtimeLock.assertLocked();
2109
2110 objc_debug_realized_class_generation_count++;
2111
2112 Class *classp;
2113 for (classp = &_firstRealizedClass;
2114 *classp != cls;
2115 classp = &(*classp)->data()->nextSiblingClass)
2116 { }
2117
2118 *classp = (*classp)->data()->nextSiblingClass;
2119 }
2120
2121
2122 /***********************************************************************
2123 * addSubclass
2124 * Adds subcls as a subclass of supercls.
2125 * Locking: runtimeLock must be held by the caller.
2126 **********************************************************************/
2127 static void addSubclass(Class supercls, Class subcls)
2128 {
2129 runtimeLock.assertLocked();
2130
2131 if (supercls && subcls) {
2132 ASSERT(supercls->isRealized());
2133 ASSERT(subcls->isRealized());
2134
2135 objc_debug_realized_class_generation_count++;
2136
2137 subcls->data()->nextSiblingClass = supercls->data()->firstSubclass;
2138 supercls->data()->firstSubclass = subcls;
2139
2140 if (supercls->hasCxxCtor()) {
2141 subcls->setHasCxxCtor();
2142 }
2143
2144 if (supercls->hasCxxDtor()) {
2145 subcls->setHasCxxDtor();
2146 }
2147
2148 objc::AWZScanner::scanAddedSubClass(subcls, supercls);
2149 objc::RRScanner::scanAddedSubClass(subcls, supercls);
2150 objc::CoreScanner::scanAddedSubClass(subcls, supercls);
2151
2152 // Special case: instancesRequireRawIsa does not propagate
2153 // from root class to root metaclass
2154 if (supercls->instancesRequireRawIsa() && supercls->superclass) {
2155 subcls->setInstancesRequireRawIsaRecursively(true);
2156 }
2157 }
2158 }
2159
2160
2161 /***********************************************************************
2162 * removeSubclass
2163 * Removes subcls as a subclass of supercls.
2164 * Locking: runtimeLock must be held by the caller.
2165 **********************************************************************/
2166 static void removeSubclass(Class supercls, Class subcls)
2167 {
2168 runtimeLock.assertLocked();
2169 ASSERT(supercls->isRealized());
2170 ASSERT(subcls->isRealized());
2171 ASSERT(subcls->superclass == supercls);
2172
2173 objc_debug_realized_class_generation_count++;
2174
2175 Class *cp;
2176 for (cp = &supercls->data()->firstSubclass;
2177 *cp && *cp != subcls;
2178 cp = &(*cp)->data()->nextSiblingClass)
2179 ;
2180 ASSERT(*cp == subcls);
2181 *cp = subcls->data()->nextSiblingClass;
2182 }
2183
2184
2185
2186 /***********************************************************************
2187 * protocols
2188 * Returns the protocol name => protocol map for protocols.
2189 * Locking: runtimeLock must be read- or write-locked by the caller
2190 **********************************************************************/
2191 static NXMapTable *protocols(void)
2192 {
2193 static NXMapTable *protocol_map = nil;
2194
2195 runtimeLock.assertLocked();
2196
2197 INIT_ONCE_PTR(protocol_map,
2198 NXCreateMapTable(NXStrValueMapPrototype, 16),
2199 NXFreeMapTable(v) );
2200
2201 return protocol_map;
2202 }
2203
2204
2205 /***********************************************************************
2206 * getProtocol
2207 * Looks up a protocol by name. Demangled Swift names are recognized.
2208 * Locking: runtimeLock must be read- or write-locked by the caller.
2209 **********************************************************************/
2210 static NEVER_INLINE Protocol *getProtocol(const char *name)
2211 {
2212 runtimeLock.assertLocked();
2213
2214 // Try name as-is.
2215 Protocol *result = (Protocol *)NXMapGet(protocols(), name);
2216 if (result) return result;
2217
2218 // Try Swift-mangled equivalent of the given name.
2219 if (char *swName = copySwiftV1MangledName(name, true/*isProtocol*/)) {
2220 result = (Protocol *)NXMapGet(protocols(), swName);
2221 free(swName);
2222 if (result) return result;
2223 }
2224
2225 // Try table from dyld shared cache
2226 // Temporarily check that we are using the new table. Eventually this check
2227 // will always be true.
2228 // FIXME: Remove this check when we can
2229 if (sharedCacheSupportsProtocolRoots()) {
2230 result = getPreoptimizedProtocol(name);
2231 if (result) return result;
2232 }
2233
2234 return nil;
2235 }
2236
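// Usage sketch (hypothetical helper): the public objc_getProtocol() entry
// point resolves through getProtocol() above under runtimeLock, so demangled
// Swift protocol names are accepted the same way as ordinary ones.
static Protocol *exampleLookUpProtocol(const char *name)
{
    return objc_getProtocol(name);
}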
2237
2238 /***********************************************************************
2239 * remapProtocol
2240 * Returns the live protocol pointer for proto, which may be pointing to
2241 * a protocol struct that has been reallocated.
2242 * Locking: runtimeLock must be read- or write-locked by the caller
2243 **********************************************************************/
2244 static ALWAYS_INLINE protocol_t *remapProtocol(protocol_ref_t proto)
2245 {
2246 runtimeLock.assertLocked();
2247
2248 // Protocols in shared cache images have a canonical bit to mark that they
2249 // are the definition we should use
2250 if (((protocol_t *)proto)->isCanonical())
2251 return (protocol_t *)proto;
2252
2253 protocol_t *newproto = (protocol_t *)
2254 getProtocol(((protocol_t *)proto)->mangledName);
2255 return newproto ? newproto : (protocol_t *)proto;
2256 }
2257
2258
2259 /***********************************************************************
2260 * remapProtocolRef
2261 * Fix up a protocol ref, in case the protocol referenced has been reallocated.
2262 * Locking: runtimeLock must be read- or write-locked by the caller
2263 **********************************************************************/
2264 static size_t UnfixedProtocolReferences;
2265 static void remapProtocolRef(protocol_t **protoref)
2266 {
2267 runtimeLock.assertLocked();
2268
2269 protocol_t *newproto = remapProtocol((protocol_ref_t)*protoref);
2270 if (*protoref != newproto) {
2271 *protoref = newproto;
2272 UnfixedProtocolReferences++;
2273 }
2274 }
2275
2276
2277 /***********************************************************************
2278 * moveIvars
2279 * Slides a class's ivars to accommodate the given superclass size.
2280 * Ivars are NOT compacted to compensate for a superclass that shrunk.
2281 * Locking: runtimeLock must be held by the caller.
2282 **********************************************************************/
2283 static void moveIvars(class_ro_t *ro, uint32_t superSize)
2284 {
2285 runtimeLock.assertLocked();
2286
2287 uint32_t diff;
2288
2289 ASSERT(superSize > ro->instanceStart);
2290 diff = superSize - ro->instanceStart;
2291
2292 if (ro->ivars) {
2293 // Find maximum alignment in this class's ivars
2294 uint32_t maxAlignment = 1;
2295 for (const auto& ivar : *ro->ivars) {
2296 if (!ivar.offset) continue; // anonymous bitfield
2297
2298 uint32_t alignment = ivar.alignment();
2299 if (alignment > maxAlignment) maxAlignment = alignment;
2300 }
2301
2302 // Compute a slide value that preserves that alignment
2303 uint32_t alignMask = maxAlignment - 1;
2304 diff = (diff + alignMask) & ~alignMask;
2305
2306 // Slide all of this class's ivars en masse
2307 for (const auto& ivar : *ro->ivars) {
2308 if (!ivar.offset) continue; // anonymous bitfield
2309
2310 uint32_t oldOffset = (uint32_t)*ivar.offset;
2311 uint32_t newOffset = oldOffset + diff;
2312 *ivar.offset = newOffset;
2313
2314 if (PrintIvars) {
2315 _objc_inform("IVARS: offset %u -> %u for %s "
2316 "(size %u, align %u)",
2317 oldOffset, newOffset, ivar.name,
2318 ivar.size, ivar.alignment());
2319 }
2320 }
2321 }
2322
2323 *(uint32_t *)&ro->instanceStart += diff;
2324 *(uint32_t *)&ro->instanceSize += diff;
2325 }
2326
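// Worked sketch of the slide arithmetic above (hypothetical numbers): if the
// superclass grew from 16 to 24 bytes (diff = 8) and the largest ivar
// alignment is 16, the slide is rounded up to 16 so every ivar stays aligned.
static uint32_t exampleRoundSlideToAlignment(uint32_t diff, uint32_t maxAlignment)
{
    uint32_t alignMask = maxAlignment - 1;
    return (diff + alignMask) & ~alignMask;   // 8 -> 16 when maxAlignment == 16
}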
2327
2328 static void reconcileInstanceVariables(Class cls, Class supercls, const class_ro_t*& ro)
2329 {
2330 class_rw_t *rw = cls->data();
2331
2332 ASSERT(supercls);
2333 ASSERT(!cls->isMetaClass());
2334
2335 /* debug: print them all before sliding
2336 if (ro->ivars) {
2337 for (const auto& ivar : *ro->ivars) {
2338 if (!ivar.offset) continue; // anonymous bitfield
2339
2340 _objc_inform("IVARS: %s.%s (offset %u, size %u, align %u)",
2341 ro->name, ivar.name,
2342 *ivar.offset, ivar.size, ivar.alignment());
2343 }
2344 }
2345 */
2346
2347 // Non-fragile ivars - reconcile this class with its superclass
2348 const class_ro_t *super_ro = supercls->data()->ro;
2349
2350 if (DebugNonFragileIvars) {
2351 // Debugging: Force non-fragile ivars to slide.
2352 // Intended to find compiler, runtime, and program bugs.
2353 // If it fails with this and works without, you have a problem.
2354
2355 // Operation: Reset everything to 0 + misalignment.
2356 // Then force the normal sliding logic to push everything back.
2357
2358 // Exceptions: root classes, metaclasses, *NSCF* classes,
2359 // __CF* classes, NSConstantString, NSSimpleCString
2360
2361 // (already know it's not root because supercls != nil)
2362 const char *clsname = cls->mangledName();
2363 if (!strstr(clsname, "NSCF") &&
2364 0 != strncmp(clsname, "__CF", 4) &&
2365 0 != strcmp(clsname, "NSConstantString") &&
2366 0 != strcmp(clsname, "NSSimpleCString"))
2367 {
2368 uint32_t oldStart = ro->instanceStart;
2369 class_ro_t *ro_w = make_ro_writeable(rw);
2370 ro = rw->ro;
2371
2372 // Find max ivar alignment in class.
2373 // default to word size to simplify ivar update
2374 uint32_t alignment = 1<<WORD_SHIFT;
2375 if (ro->ivars) {
2376 for (const auto& ivar : *ro->ivars) {
2377 if (ivar.alignment() > alignment) {
2378 alignment = ivar.alignment();
2379 }
2380 }
2381 }
2382 uint32_t misalignment = ro->instanceStart % alignment;
2383 uint32_t delta = ro->instanceStart - misalignment;
2384 ro_w->instanceStart = misalignment;
2385 ro_w->instanceSize -= delta;
2386
2387 if (PrintIvars) {
2388 _objc_inform("IVARS: DEBUG: forcing ivars for class '%s' "
2389 "to slide (instanceStart %zu -> %zu)",
2390 cls->nameForLogging(), (size_t)oldStart,
2391 (size_t)ro->instanceStart);
2392 }
2393
2394 if (ro->ivars) {
2395 for (const auto& ivar : *ro->ivars) {
2396 if (!ivar.offset) continue; // anonymous bitfield
2397 *ivar.offset -= delta;
2398 }
2399 }
2400 }
2401 }
2402
2403 if (ro->instanceStart >= super_ro->instanceSize) {
2404 // Superclass has not overgrown its space. We're done here.
2405 return;
2406 }
2407 // fixme can optimize for "class has no new ivars", etc
2408
2409 if (ro->instanceStart < super_ro->instanceSize) {
2410 // Superclass has changed size. This class's ivars must move.
2411 // Also slide layout bits in parallel.
2412 // This code is incapable of compacting the subclass to
2413 // compensate for a superclass that shrunk, so don't do that.
2414 if (PrintIvars) {
2415 _objc_inform("IVARS: sliding ivars for class %s "
2416 "(superclass was %u bytes, now %u)",
2417 cls->nameForLogging(), ro->instanceStart,
2418 super_ro->instanceSize);
2419 }
2420 class_ro_t *ro_w = make_ro_writeable(rw);
2421 ro = rw->ro;
2422 moveIvars(ro_w, super_ro->instanceSize);
2423 gdb_objc_class_changed(cls, OBJC_CLASS_IVARS_CHANGED, ro->name);
2424 }
2425 }
2426
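// Usage sketch (hypothetical helper): sliding is safe because compiled code
// reads ivar offsets indirectly instead of baking them in. The slid offsets
// are visible through the public runtime:
static ptrdiff_t exampleIvarOffset(const char *clsName, const char *ivarName)
{
    Class cls = objc_getClass(clsName);
    Ivar ivar = cls ? class_getInstanceVariable(cls, ivarName) : nil;
    return ivar ? ivar_getOffset(ivar) : -1;
}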
2427
2428 /***********************************************************************
2429 * realizeClassWithoutSwift
2430 * Performs first-time initialization on class cls,
2431 * including allocating its read-write data.
2432 * Does not perform any Swift-side initialization.
2433 * Returns the real class structure for the class.
2434 * Locking: runtimeLock must be write-locked by the caller
2435 **********************************************************************/
2436 static Class realizeClassWithoutSwift(Class cls, Class previously)
2437 {
2438 runtimeLock.assertLocked();
2439
2440 const class_ro_t *ro;
2441 class_rw_t *rw;
2442 Class supercls;
2443 Class metacls;
2444 bool isMeta;
2445
2446 if (!cls) return nil;
2447 if (cls->isRealized()) return cls;
2448 ASSERT(cls == remapClass(cls));
2449
2450 // fixme verify class is not in an un-dlopened part of the shared cache?
2451
2452 ro = (const class_ro_t *)cls->data();
2453 if (ro->flags & RO_FUTURE) {
2454 // This was a future class. rw data is already allocated.
2455 rw = cls->data();
2456 ro = cls->data()->ro;
2457 cls->changeInfo(RW_REALIZED|RW_REALIZING, RW_FUTURE);
2458 } else {
2459 // Normal class. Allocate writeable class data.
2460 rw = (class_rw_t *)calloc(sizeof(class_rw_t), 1);
2461 rw->ro = ro;
2462 rw->flags = RW_REALIZED|RW_REALIZING;
2463 cls->setData(rw);
2464 }
2465
2466 isMeta = ro->flags & RO_META;
2467 #if FAST_CACHE_META
2468 if (isMeta) cls->cache.setBit(FAST_CACHE_META);
2469 #endif
2470 rw->version = isMeta ? 7 : 0; // old runtime went up to 6
2471
2472
2473 // Choose an index for this class.
2474 // Sets cls->instancesRequireRawIsa if no more indexes are available
2475 cls->chooseClassArrayIndex();
2476
2477 if (PrintConnecting) {
2478 _objc_inform("CLASS: realizing class '%s'%s %p %p #%u %s%s",
2479 cls->nameForLogging(), isMeta ? " (meta)" : "",
2480 (void*)cls, ro, cls->classArrayIndex(),
2481 cls->isSwiftStable() ? "(swift)" : "",
2482 cls->isSwiftLegacy() ? "(pre-stable swift)" : "");
2483 }
2484
2485 // Realize superclass and metaclass, if they aren't already.
2486 // This needs to be done after RW_REALIZED is set above, for root classes.
2487 // This needs to be done after class index is chosen, for root metaclasses.
2488 // This assumes that none of those classes have Swift contents,
2489 // or that Swift's initializers have already been called.
2490 // fixme that assumption will be wrong if we add support
2491 // for ObjC subclasses of Swift classes.
2492 supercls = realizeClassWithoutSwift(remapClass(cls->superclass), nil);
2493 metacls = realizeClassWithoutSwift(remapClass(cls->ISA()), nil);
2494
2495 #if SUPPORT_NONPOINTER_ISA
2496 if (isMeta) {
2497 // Metaclasses do not need any features from non-pointer isa.
2498 // This allows for a fast path for classes in objc_retain/objc_release.
2499 cls->setInstancesRequireRawIsa();
2500 } else {
2501 // Disable non-pointer isa for some classes and/or platforms.
2502 // Set instancesRequireRawIsa.
2503 bool instancesRequireRawIsa = cls->instancesRequireRawIsa();
2504 bool rawIsaIsInherited = false;
2505 static bool hackedDispatch = false;
2506
2507 if (DisableNonpointerIsa) {
2508 // Non-pointer isa disabled by environment or app SDK version
2509 instancesRequireRawIsa = true;
2510 }
2511 else if (!hackedDispatch && 0 == strcmp(ro->name, "OS_object"))
2512 {
2513 // hack for libdispatch et al - isa also acts as vtable pointer
2514 hackedDispatch = true;
2515 instancesRequireRawIsa = true;
2516 }
2517 else if (supercls && supercls->superclass &&
2518 supercls->instancesRequireRawIsa())
2519 {
2520 // This is also propagated by addSubclass()
2521 // but nonpointer isa setup needs it earlier.
2522 // Special case: instancesRequireRawIsa does not propagate
2523 // from root class to root metaclass
2524 instancesRequireRawIsa = true;
2525 rawIsaIsInherited = true;
2526 }
2527
2528 if (instancesRequireRawIsa) {
2529 cls->setInstancesRequireRawIsaRecursively(rawIsaIsInherited);
2530 }
2531 }
2532 // SUPPORT_NONPOINTER_ISA
2533 #endif
2534
2535 // Update superclass and metaclass in case of remapping
2536 cls->superclass = supercls;
2537 cls->initClassIsa(metacls);
2538
2539 // Reconcile instance variable offsets / layout.
2540 // This may reallocate class_ro_t, updating our ro variable.
2541 if (supercls && !isMeta) reconcileInstanceVariables(cls, supercls, ro);
2542
2543 // Set fastInstanceSize if it wasn't set already.
2544 cls->setInstanceSize(ro->instanceSize);
2545
2546 // Copy some flags from ro to rw
2547 if (ro->flags & RO_HAS_CXX_STRUCTORS) {
2548 cls->setHasCxxDtor();
2549 if (! (ro->flags & RO_HAS_CXX_DTOR_ONLY)) {
2550 cls->setHasCxxCtor();
2551 }
2552 }
2553
2554 // Propagate the associated objects forbidden flag from ro or from
2555 // the superclass.
2556 if ((ro->flags & RO_FORBIDS_ASSOCIATED_OBJECTS) ||
2557 (supercls && supercls->forbidsAssociatedObjects()))
2558 {
2559 rw->flags |= RW_FORBIDS_ASSOCIATED_OBJECTS;
2560 }
2561
2562 // Connect this class to its superclass's subclass lists
2563 if (supercls) {
2564 addSubclass(supercls, cls);
2565 } else {
2566 addRootClass(cls);
2567 }
2568
2569 // Attach categories
2570 methodizeClass(cls, previously);
2571
2572 return cls;
2573 }
2574
2575
2576 /***********************************************************************
2577 * _objc_realizeClassFromSwift
2578 * Called by Swift when it needs the ObjC part of a class to be realized.
2579 * There are four cases:
2580 * 1. cls != nil; previously == cls
2581 * Class cls is being realized in place
2582 * 2. cls != nil; previously == nil
2583 * Class cls is being constructed at runtime
2584 * 3. cls != nil; previously != cls
2585 * The class that was at previously has been reallocated to cls
2586 * 4. cls == nil, previously != nil
2587 * The class at previously is hereby disavowed
2588 *
2589 * Only variants #1 and #2 are supported today.
2590 *
2591 * Locking: acquires runtimeLock
2592 **********************************************************************/
2593 Class _objc_realizeClassFromSwift(Class cls, void *previously)
2594 {
2595 if (cls) {
2596 if (previously && previously != (void*)cls) {
2597 // #3: relocation
2598 mutex_locker_t lock(runtimeLock);
2599 addRemappedClass((Class)previously, cls);
2600 addClassTableEntry(cls);
2601 addNamedClass(cls, cls->mangledName(), /*replacing*/nil);
2602 return realizeClassWithoutSwift(cls, (Class)previously);
2603 } else {
2604 // #1 and #2: realization in place, or new class
2605 mutex_locker_t lock(runtimeLock);
2606
2607 if (!previously) {
2608 // #2: new class
2609 cls = readClass(cls, false/*bundle*/, false/*shared cache*/);
2610 }
2611
2612 // #1 and #2: realization in place, or new class
2613 // We ignore the Swift metadata initializer callback.
2614 // We assume that's all handled since we're being called from Swift.
2615 return realizeClassWithoutSwift(cls, nil);
2616 }
2617 }
2618 else {
2619 // #4: disavowal
2620 // In the future this will mean remapping the old address to nil
2621 // and if necessary removing the old address from any other tables.
2622 _objc_fatal("Swift requested that class %p be ignored, "
2623 "but libobjc does not support that.", previously);
2624 }
2625 }
2626
2627 /***********************************************************************
2628 * realizeSwiftClass
2629 * Performs first-time initialization on class cls,
2630 * including allocating its read-write data,
2631 * and any Swift-side initialization.
2632 * Returns the real class structure for the class.
2633 * Locking: acquires runtimeLock indirectly
2634 **********************************************************************/
2635 static Class realizeSwiftClass(Class cls)
2636 {
2637 runtimeLock.assertUnlocked();
2638
2639 // Some assumptions:
2640 // * Metaclasses never have a Swift initializer.
2641 // * Root classes never have a Swift initializer.
2642 // (These two together avoid initialization order problems at the root.)
2643 // * Unrealized non-Swift classes have no Swift ancestry.
2644 // * Unrealized Swift classes with no initializer have no ancestry that
2645 // does have the initializer.
2646 // (These two together mean we don't need to scan superclasses here
2647 // and we don't need to worry about Swift superclasses inside
2648 // realizeClassWithoutSwift()).
2649
2650 // fixme some of these assumptions will be wrong
2651 // if we add support for ObjC subclasses of Swift classes.
2652
2653 #if DEBUG
2654 runtimeLock.lock();
2655 ASSERT(remapClass(cls) == cls);
2656 ASSERT(cls->isSwiftStable_ButAllowLegacyForNow());
2657 ASSERT(!cls->isMetaClassMaybeUnrealized());
2658 ASSERT(cls->superclass);
2659 runtimeLock.unlock();
2660 #endif
2661
2662 // Look for a Swift metadata initialization function
2663 // installed on the class. If it is present we call it.
2664 // That function in turn initializes the Swift metadata,
2665 // prepares the "compiler-generated" ObjC metadata if not
2666 // already present, and calls _objc_realizeClassFromSwift() to finish
2667 // our own initialization.
2668
2669 if (auto init = cls->swiftMetadataInitializer()) {
2670 if (PrintConnecting) {
2671 _objc_inform("CLASS: calling Swift metadata initializer "
2672 "for class '%s' (%p)", cls->nameForLogging(), cls);
2673 }
2674
2675 Class newcls = init(cls, nil);
2676
2677 // fixme someday Swift will need to relocate classes at this point,
2678 // but we don't accept that yet.
2679 if (cls != newcls) {
2680 mutex_locker_t lock(runtimeLock);
2681 addRemappedClass(cls, newcls);
2682 }
2683
2684 return newcls;
2685 }
2686 else {
2687 // No Swift-side initialization callback.
2688 // Perform our own realization directly.
2689 mutex_locker_t lock(runtimeLock);
2690 return realizeClassWithoutSwift(cls, nil);
2691 }
2692 }
2693
2694
2695 /***********************************************************************
2696 * realizeClassMaybeSwift (MaybeRelock / AndUnlock / AndLeaveLocked)
2697 * Realize a class that might be a Swift class.
2698 * Returns the real class structure for the class.
2699 * Locking:
2700 * runtimeLock must be held on entry
2701 * runtimeLock may be dropped during execution
2702 * ...AndUnlock function leaves runtimeLock unlocked on exit
2703 * ...AndLeaveLocked re-acquires runtimeLock if it was dropped
2704 * This complication avoids repeated lock transitions in some cases.
2705 **********************************************************************/
2706 static Class
2707 realizeClassMaybeSwiftMaybeRelock(Class cls, mutex_t& lock, bool leaveLocked)
2708 {
2709 lock.assertLocked();
2710
2711 if (!cls->isSwiftStable_ButAllowLegacyForNow()) {
2712 // Non-Swift class. Realize it now with the lock still held.
2713 // fixme wrong in the future for objc subclasses of swift classes
2714 realizeClassWithoutSwift(cls, nil);
2715 if (!leaveLocked) lock.unlock();
2716 } else {
2717 // Swift class. We need to drop locks and call the Swift
2718 // runtime to initialize it.
2719 lock.unlock();
2720 cls = realizeSwiftClass(cls);
2721 ASSERT(cls->isRealized()); // callback must have provoked realization
2722 if (leaveLocked) lock.lock();
2723 }
2724
2725 return cls;
2726 }
2727
2728 static Class
2729 realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock)
2730 {
2731 return realizeClassMaybeSwiftMaybeRelock(cls, lock, false);
2732 }
2733
2734 static Class
2735 realizeClassMaybeSwiftAndLeaveLocked(Class cls, mutex_t& lock)
2736 {
2737 return realizeClassMaybeSwiftMaybeRelock(cls, lock, true);
2738 }
2739
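// Usage sketch (hypothetical helper): lazily-exported classes are realized on
// first use. Even a plain by-name lookup is enough to push a still-lazy class,
// its superclasses, and its metaclass through the realization paths above.
static Class exampleForceRealization(const char *name)
{
    return objc_getClass(name);
}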
2740
2741 /***********************************************************************
2742 * missingWeakSuperclass
2743 * Return YES if some superclass of cls was weak-linked and is missing.
2744 **********************************************************************/
2745 static bool
2746 missingWeakSuperclass(Class cls)
2747 {
2748 ASSERT(!cls->isRealized());
2749
2750 if (!cls->superclass) {
2751 // superclass nil. This is normal for root classes only.
2752 return (!(cls->data()->flags & RO_ROOT));
2753 } else {
2754 // superclass not nil. Check if a higher superclass is missing.
2755 Class supercls = remapClass(cls->superclass);
2756 ASSERT(cls != cls->superclass);
2757 ASSERT(cls != supercls);
2758 if (!supercls) return YES;
2759 if (supercls->isRealized()) return NO;
2760 return missingWeakSuperclass(supercls);
2761 }
2762 }
2763
2764
2765 /***********************************************************************
2766 * realizeAllClassesInImage
2767 * Non-lazily realizes all unrealized classes in the given image.
2768 * Locking: runtimeLock must be held by the caller.
2769 * Locking: this function may drop and re-acquire the lock.
2770 **********************************************************************/
2771 static void realizeAllClassesInImage(header_info *hi)
2772 {
2773 runtimeLock.assertLocked();
2774
2775 size_t count, i;
2776 classref_t const *classlist;
2777
2778 if (hi->areAllClassesRealized()) return;
2779
2780 classlist = _getObjc2ClassList(hi, &count);
2781
2782 for (i = 0; i < count; i++) {
2783 Class cls = remapClass(classlist[i]);
2784 if (cls) {
2785 realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
2786 }
2787 }
2788
2789 hi->setAllClassesRealized(YES);
2790 }
2791
2792
2793 /***********************************************************************
2794 * realizeAllClasses
2795 * Non-lazily realizes all unrealized classes in all known images.
2796 * Locking: runtimeLock must be held by the caller.
2797 * Locking: this function may drop and re-acquire the lock.
2798 * Dropping the lock makes this function thread-unsafe with respect
2799 * to concurrent image unload, but the callers of this function
2800 * already ultimately do something that is also thread-unsafe with
2801 * respect to image unload (such as using the list of all classes).
2802 **********************************************************************/
2803 static void realizeAllClasses(void)
2804 {
2805 runtimeLock.assertLocked();
2806
2807 header_info *hi;
2808 for (hi = FirstHeader; hi; hi = hi->getNext()) {
2809 realizeAllClassesInImage(hi); // may drop and re-acquire runtimeLock
2810 }
2811 }
2812
2813
2814 /***********************************************************************
2815 * _objc_allocateFutureClass
2816 * Allocate an unresolved future class for the given class name.
2817 * Returns any existing allocation if one was already made.
2818 * Assumes the named class doesn't exist yet.
2819 * Locking: acquires runtimeLock
2820 **********************************************************************/
2821 Class _objc_allocateFutureClass(const char *name)
2822 {
2823 mutex_locker_t lock(runtimeLock);
2824
2825 Class cls;
2826 NXMapTable *map = futureNamedClasses();
2827
2828 if ((cls = (Class)NXMapGet(map, name))) {
2829 // Already have a future class for this name.
2830 return cls;
2831 }
2832
2833 cls = _calloc_class(sizeof(objc_class));
2834 addFutureNamedClass(name, cls);
2835
2836 return cls;
2837 }
2838
2839
2840 /***********************************************************************
2841 * objc_getFutureClass. Return the id of the named class.
2842 * If the class does not exist, return an uninitialized class
2843 * structure that will be used for the class when and if it
2844 * does get loaded.
2845 * Not thread safe.
2846 **********************************************************************/
2847 Class objc_getFutureClass(const char *name)
2848 {
2849 Class cls;
2850
2851 // YES unconnected, NO class handler
2852 // (unconnected is OK because it will someday be the real class)
2853 cls = look_up_class(name, YES, NO);
2854 if (cls) {
2855 if (PrintFuture) {
2856 _objc_inform("FUTURE: found %p already in use for %s",
2857 (void*)cls, name);
2858 }
2859
2860 return cls;
2861 }
2862
2863 // No class or future class with that name yet. Make one.
2864 // fixme not thread-safe with respect to
2865 // simultaneous library load or getFutureClass.
2866 return _objc_allocateFutureClass(name);
2867 }
2868
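// Usage sketch (hypothetical class name): a host such as CoreFoundation can
// reserve a class structure before the real class is loaded. The returned
// pointer stays valid and is filled in by readClass() when the image arrives.
static Class exampleReserveFutureClass(void)
{
    return objc_getFutureClass("ExampleNotYetLoadedClass");
}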
2869
2870 BOOL _class_isFutureClass(Class cls)
2871 {
2872 return cls && cls->isFuture();
2873 }
2874
2875
2876 /***********************************************************************
2877 * _objc_flush_caches
2878 * Flushes all caches.
2879 * (Historical behavior: flush caches for cls, its metaclass,
2880 * and subclasses thereof. Nil flushes all classes.)
2881 * Locking: _objc_flush_caches acquires runtimeLock; the flushCaches() helper below requires it to be held by the caller
2882 **********************************************************************/
2883 static void flushCaches(Class cls)
2884 {
2885 runtimeLock.assertLocked();
2886 #if CONFIG_USE_CACHE_LOCK
2887 mutex_locker_t lock(cacheUpdateLock);
2888 #endif
2889
2890 if (cls) {
2891 foreach_realized_class_and_subclass(cls, [](Class c){
2892 cache_erase_nolock(c);
2893 return true;
2894 });
2895 }
2896 else {
2897 foreach_realized_class_and_metaclass([](Class c){
2898 cache_erase_nolock(c);
2899 return true;
2900 });
2901 }
2902 }
2903
2904
2905 void _objc_flush_caches(Class cls)
2906 {
2907 {
2908 mutex_locker_t lock(runtimeLock);
2909 flushCaches(cls);
2910 if (cls && cls->superclass && cls != cls->getIsa()) {
2911 flushCaches(cls->getIsa());
2912 } else {
2913 // cls is a root class or root metaclass. Its metaclass is itself
2914 // or a subclass so the metaclass caches were already flushed.
2915 }
2916 }
2917
2918 if (!cls) {
2919 // collectALot if cls==nil
2920 #if CONFIG_USE_CACHE_LOCK
2921 mutex_locker_t lock(cacheUpdateLock);
2922 #else
2923 mutex_locker_t lock(runtimeLock);
2924 #endif
2925 cache_collect(true);
2926 }
2927 }
2928
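// Usage sketch (hypothetical helper): passing nil flushes every class's method
// cache and, as above, also drains pending cache garbage via cache_collect(true).
static void exampleFlushAllMethodCaches(void)
{
    _objc_flush_caches(nil);
}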
2929
2930 /***********************************************************************
2931 * map_images
2932 * Process the given images which are being mapped in by dyld.
2933 * Calls ABI-agnostic code after taking ABI-specific locks.
2934 *
2935 * Locking: write-locks runtimeLock
2936 **********************************************************************/
2937 void
2938 map_images(unsigned count, const char * const paths[],
2939 const struct mach_header * const mhdrs[])
2940 {
2941 mutex_locker_t lock(runtimeLock);
2942 return map_images_nolock(count, paths, mhdrs);
2943 }
2944
2945
2946 /***********************************************************************
2947 * load_images
2948 * Process +load in the given images which are being mapped in by dyld.
2949 *
2950 * Locking: write-locks runtimeLock and loadMethodLock
2951 **********************************************************************/
2952 extern bool hasLoadMethods(const headerType *mhdr);
2953 extern void prepare_load_methods(const headerType *mhdr);
2954
2955 void
2956 load_images(const char *path __unused, const struct mach_header *mh)
2957 {
2958 // Return without taking locks if there are no +load methods here.
2959 if (!hasLoadMethods((const headerType *)mh)) return;
2960
2961 recursive_mutex_locker_t lock(loadMethodLock);
2962
2963 // Discover load methods
2964 {
2965 mutex_locker_t lock2(runtimeLock);
2966 prepare_load_methods((const headerType *)mh);
2967 }
2968
2969 // Call +load methods (without runtimeLock - re-entrant)
2970 call_load_methods();
2971 }
2972
2973
2974 /***********************************************************************
2975 * unmap_image
2976 * Process the given image which is about to be unmapped by dyld.
2977 *
2978 * Locking: write-locks runtimeLock and loadMethodLock
2979 **********************************************************************/
2980 void
2981 unmap_image(const char *path __unused, const struct mach_header *mh)
2982 {
2983 recursive_mutex_locker_t lock(loadMethodLock);
2984 mutex_locker_t lock2(runtimeLock);
2985 unmap_image_nolock(mh);
2986 }
2987
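// For context, a sketch (the actual registration lives in _objc_init() in
// objc-os.mm, not here): map_images, load_images, and unmap_image are the
// callbacks the runtime hands to dyld, roughly:
//
//   _dyld_objc_notify_register(&map_images, load_images, unmap_image);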
2988
2989 /***********************************************************************
2990 * mustReadClasses
2991 * Preflight check in advance of readClass() from an image.
2992 **********************************************************************/
2993 bool mustReadClasses(header_info *hi, bool hasDyldRoots)
2994 {
2995 const char *reason;
2996
2997 // If the image is not preoptimized then we must read classes.
2998 if (!hi->hasPreoptimizedClasses()) {
2999 reason = nil; // Don't log this one because it is noisy.
3000 goto readthem;
3001 }
3002
3003 // If iOS simulator then we must read classes.
3004 #if TARGET_OS_SIMULATOR
3005 reason = "the image is for iOS simulator";
3006 goto readthem;
3007 #endif
3008
3009 ASSERT(!hi->isBundle()); // no MH_BUNDLE in shared cache
3010
3011 // If the image may have missing weak superclasses then we must read classes
3012 if (!noMissingWeakSuperclasses() || hasDyldRoots) {
3013 reason = "the image may contain classes with missing weak superclasses";
3014 goto readthem;
3015 }
3016
3017 // If there are unresolved future classes then we must read classes.
3018 if (haveFutureNamedClasses()) {
3019 reason = "there are unresolved future classes pending";
3020 goto readthem;
3021 }
3022
3023 // readClass() rewrites bits in backward-deploying Swift stable ABI code.
3024 // The assumption here is that there are no such classes
3025 // in the dyld shared cache.
3026 #if DEBUG
3027 {
3028 size_t count;
3029 classref_t const *classlist = _getObjc2ClassList(hi, &count);
3030 for (size_t i = 0; i < count; i++) {
3031 Class cls = remapClass(classlist[i]);
3032 ASSERT(!cls->isUnfixedBackwardDeployingStableSwift());
3033 }
3034 }
3035 #endif
3036
3037 // readClass() does not need to do anything.
3038 return NO;
3039
3040 readthem:
3041 if (PrintPreopt && reason) {
3042 _objc_inform("PREOPTIMIZATION: reading classes manually from %s "
3043 "because %s", hi->fname(), reason);
3044 }
3045 return YES;
3046 }
3047
3048
3049 /***********************************************************************
3050 * readClass
3051 * Read a class and metaclass as written by a compiler.
3052 * Returns the new class pointer. This could be:
3053 * - cls
3054 * - nil (cls has a missing weak-linked superclass)
3055 * - something else (space for this class was reserved by a future class)
3056 *
3057 * Note that all work performed by this function is preflighted by
3058 * mustReadClasses(). Do not change this function without updating that one.
3059 *
3060 * Locking: runtimeLock acquired by map_images or objc_readClassPair
3061 **********************************************************************/
3062 Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized)
3063 {
3064 const char *mangledName = cls->mangledName();
3065
3066 if (missingWeakSuperclass(cls)) {
3067 // No superclass (probably weak-linked).
3068 // Disavow any knowledge of this subclass.
3069 if (PrintConnecting) {
3070 _objc_inform("CLASS: IGNORING class '%s' with "
3071 "missing weak-linked superclass",
3072 cls->nameForLogging());
3073 }
3074 addRemappedClass(cls, nil);
3075 cls->superclass = nil;
3076 return nil;
3077 }
3078
3079 cls->fixupBackwardDeployingStableSwift();
3080
3081 Class replacing = nil;
3082 if (Class newCls = popFutureNamedClass(mangledName)) {
3083 // This name was previously allocated as a future class.
3084 // Copy objc_class to future class's struct.
3085 // Preserve future's rw data block.
3086
3087 if (newCls->isAnySwift()) {
3088 _objc_fatal("Can't complete future class request for '%s' "
3089 "because the real class is too big.",
3090 cls->nameForLogging());
3091 }
3092
3093 class_rw_t *rw = newCls->data();
3094 const class_ro_t *old_ro = rw->ro;
3095 memcpy(newCls, cls, sizeof(objc_class));
3096 rw->ro = (class_ro_t *)newCls->data();
3097 newCls->setData(rw);
3098 freeIfMutable((char *)old_ro->name);
3099 free((void *)old_ro);
3100
3101 addRemappedClass(cls, newCls);
3102
3103 replacing = cls;
3104 cls = newCls;
3105 }
3106
3107 if (headerIsPreoptimized && !replacing) {
3108 // class list built in shared cache
3109 // fixme strict assert doesn't work because of duplicates
3110 // ASSERT(cls == getClass(name));
3111 ASSERT(getClassExceptSomeSwift(mangledName));
3112 } else {
3113 addNamedClass(cls, mangledName, replacing);
3114 addClassTableEntry(cls);
3115 }
3116
3117 // for future reference: shared cache never contains MH_BUNDLEs
3118 if (headerIsBundle) {
3119 cls->data()->flags |= RO_FROM_BUNDLE;
3120 cls->ISA()->data()->flags |= RO_FROM_BUNDLE;
3121 }
3122
3123 return cls;
3124 }
3125
3126
3127 /***********************************************************************
3128 * readProtocol
3129 * Read a protocol as written by a compiler.
3130 **********************************************************************/
3131 static void
3132 readProtocol(protocol_t *newproto, Class protocol_class,
3133 NXMapTable *protocol_map,
3134 bool headerIsPreoptimized, bool headerIsBundle)
3135 {
3136 // This is not enough to make protocols in unloaded bundles safe,
3137 // but it does prevent crashes when looking up unrelated protocols.
3138 auto insertFn = headerIsBundle ? NXMapKeyCopyingInsert : NXMapInsert;
3139
3140 protocol_t *oldproto = (protocol_t *)getProtocol(newproto->mangledName);
3141
3142 if (oldproto) {
3143 if (oldproto != newproto) {
3144 // Some other definition already won.
3145 if (PrintProtocols) {
3146 _objc_inform("PROTOCOLS: protocol at %p is %s "
3147 "(duplicate of %p)",
3148 newproto, oldproto->nameForLogging(), oldproto);
3149 }
3150
3151 // If we are a shared cache binary then we have a definition of this
3152 // protocol, but if another one was chosen then we need to clear our
3153 // isCanonical bit so that no-one trusts it.
3154 // Note, if getProtocol returned a shared cache protocol then the
3155 // canonical definition is already in the shared cache and we don't
3156 // need to do anything.
3157 if (headerIsPreoptimized && !oldproto->isCanonical()) {
3158 // Note newproto is an entry in our __objc_protolist section which
3159 // for shared cache binaries points to the original protocol in
3160 // that binary, not the shared cache uniqued one.
3161 auto cacheproto = (protocol_t *)
3162 getSharedCachePreoptimizedProtocol(newproto->mangledName);
3163 if (cacheproto && cacheproto->isCanonical())
3164 cacheproto->clearIsCanonical();
3165 }
3166 }
3167 }
3168 else if (headerIsPreoptimized) {
3169 // Shared cache initialized the protocol object itself,
3170 // but in order to allow out-of-cache replacement we need
3171 // to add it to the protocol table now.
3172
3173 protocol_t *cacheproto = (protocol_t *)
3174 getPreoptimizedProtocol(newproto->mangledName);
3175 protocol_t *installedproto;
3176 if (cacheproto && cacheproto != newproto) {
3177 // Another definition in the shared cache wins (because
3178 // everything in the cache was fixed up to point to it).
3179 installedproto = cacheproto;
3180 }
3181 else {
3182 // This definition wins.
3183 installedproto = newproto;
3184 }
3185
3186 ASSERT(installedproto->getIsa() == protocol_class);
3187 ASSERT(installedproto->size >= sizeof(protocol_t));
3188 insertFn(protocol_map, installedproto->mangledName,
3189 installedproto);
3190
3191 if (PrintProtocols) {
3192 _objc_inform("PROTOCOLS: protocol at %p is %s",
3193 installedproto, installedproto->nameForLogging());
3194 if (newproto != installedproto) {
3195 _objc_inform("PROTOCOLS: protocol at %p is %s "
3196 "(duplicate of %p)",
3197 newproto, installedproto->nameForLogging(),
3198 installedproto);
3199 }
3200 }
3201 }
3202 else if (newproto->size >= sizeof(protocol_t)) {
3203 // New protocol from an un-preoptimized image
3204 // with sufficient storage. Fix it up in place.
3205 // fixme duplicate protocols from unloadable bundle
3206 newproto->initIsa(protocol_class); // fixme pinned
3207 insertFn(protocol_map, newproto->mangledName, newproto);
3208 if (PrintProtocols) {
3209 _objc_inform("PROTOCOLS: protocol at %p is %s",
3210 newproto, newproto->nameForLogging());
3211 }
3212 }
3213 else {
3214 // New protocol from an un-preoptimized image
3215 // with insufficient storage. Reallocate it.
3216 // fixme duplicate protocols from unloadable bundle
3217 size_t size = max(sizeof(protocol_t), (size_t)newproto->size);
3218 protocol_t *installedproto = (protocol_t *)calloc(size, 1);
3219 memcpy(installedproto, newproto, newproto->size);
3220 installedproto->size = (typeof(installedproto->size))size;
3221
3222 installedproto->initIsa(protocol_class); // fixme pinned
3223 insertFn(protocol_map, installedproto->mangledName, installedproto);
3224 if (PrintProtocols) {
3225 _objc_inform("PROTOCOLS: protocol at %p is %s ",
3226 installedproto, installedproto->nameForLogging());
3227 _objc_inform("PROTOCOLS: protocol at %p is %s "
3228 "(reallocated to %p)",
3229 newproto, installedproto->nameForLogging(),
3230 installedproto);
3231 }
3232 }
3233 }
3234
3235 /***********************************************************************
3236 * _read_images
3237 * Perform initial processing of the headers in the linked
3238 * list beginning with headerList.
3239 *
3240 * Called by: map_images_nolock
3241 *
3242 * Locking: runtimeLock acquired by map_images
3243 **********************************************************************/
3244 void _read_images(header_info **hList, uint32_t hCount, int totalClasses, int unoptimizedTotalClasses)
3245 {
3246 header_info *hi;
3247 uint32_t hIndex;
3248 size_t count;
3249 size_t i;
3250 Class *resolvedFutureClasses = nil;
3251 size_t resolvedFutureClassCount = 0;
3252 static bool doneOnce;
3253 bool launchTime = NO;
3254 TimeLogger ts(PrintImageTimes);
3255
3256 runtimeLock.assertLocked();
3257
3258 #define EACH_HEADER \
3259 hIndex = 0; \
3260 hIndex < hCount && (hi = hList[hIndex]); \
3261 hIndex++
3262
3263 if (!doneOnce) {
3264 doneOnce = YES;
3265 launchTime = YES;
3266
3267 #if SUPPORT_NONPOINTER_ISA
3268 // Disable non-pointer isa under some conditions.
3269
3270 # if SUPPORT_INDEXED_ISA
3271 // Disable nonpointer isa if any image contains old Swift code
3272 for (EACH_HEADER) {
3273 if (hi->info()->containsSwift() &&
3274 hi->info()->swiftUnstableVersion() < objc_image_info::SwiftVersion3)
3275 {
3276 DisableNonpointerIsa = true;
3277 if (PrintRawIsa) {
3278 _objc_inform("RAW ISA: disabling non-pointer isa because "
3279 "the app or a framework contains Swift code "
3280 "older than Swift 3.0");
3281 }
3282 break;
3283 }
3284 }
3285 # endif
3286
3287 # if TARGET_OS_OSX
3288 // Disable non-pointer isa if the app is too old
3289 // (linked before OS X 10.11)
3290 if (dyld_get_program_sdk_version() < DYLD_MACOSX_VERSION_10_11) {
3291 DisableNonpointerIsa = true;
3292 if (PrintRawIsa) {
3293 _objc_inform("RAW ISA: disabling non-pointer isa because "
3294 "the app is too old (SDK version " SDK_FORMAT ")",
3295 FORMAT_SDK(dyld_get_program_sdk_version()));
3296 }
3297 }
3298
3299 // Disable non-pointer isa if the app has a __DATA,__objc_rawisa section
3300 // New apps that load old extensions may need this.
3301 for (EACH_HEADER) {
3302 if (hi->mhdr()->filetype != MH_EXECUTE) continue;
3303 unsigned long size;
3304 if (getsectiondata(hi->mhdr(), "__DATA", "__objc_rawisa", &size)) {
3305 DisableNonpointerIsa = true;
3306 if (PrintRawIsa) {
3307 _objc_inform("RAW ISA: disabling non-pointer isa because "
3308 "the app has a __DATA,__objc_rawisa section");
3309 }
3310 }
3311 break; // assume only one MH_EXECUTE image
3312 }
3313 # endif
3314
3315 #endif
3316
3317 if (DisableTaggedPointers) {
3318 disableTaggedPointers();
3319 }
3320
3321 initializeTaggedPointerObfuscator();
3322
3323 if (PrintConnecting) {
3324 _objc_inform("CLASS: found %d classes during launch", totalClasses);
3325 }
3326
3327 // namedClasses
3328 // Preoptimized classes don't go in this table.
3329 // 4/3 is NXMapTable's load factor
3330 int namedClassesSize =
3331 (isPreoptimized() ? unoptimizedTotalClasses : totalClasses) * 4 / 3;
3332 gdb_objc_realized_classes =
3333 NXCreateMapTable(NXStrValueMapPrototype, namedClassesSize);
3334
3335 ts.log("IMAGE TIMES: first time tasks");
3336 }
3337
3338 // Fix up @selector references
3339 static size_t UnfixedSelectors;
3340 {
3341 mutex_locker_t lock(selLock);
3342 for (EACH_HEADER) {
3343 if (hi->hasPreoptimizedSelectors()) continue;
3344
3345 bool isBundle = hi->isBundle();
3346 SEL *sels = _getObjc2SelectorRefs(hi, &count);
3347 UnfixedSelectors += count;
3348 for (i = 0; i < count; i++) {
3349 const char *name = sel_cname(sels[i]);
3350 SEL sel = sel_registerNameNoLock(name, isBundle);
3351 if (sels[i] != sel) {
3352 sels[i] = sel;
3353 }
3354 }
3355 }
3356 }
3357
3358 ts.log("IMAGE TIMES: fix up selector references");
3359
3360 // Discover classes. Fix up unresolved future classes. Mark bundle classes.
3361 bool hasDyldRoots = dyld_shared_cache_some_image_overridden();
3362
3363 for (EACH_HEADER) {
3364 if (! mustReadClasses(hi, hasDyldRoots)) {
3365 // Image is sufficiently optimized that we need not call readClass()
3366 continue;
3367 }
3368
3369 classref_t const *classlist = _getObjc2ClassList(hi, &count);
3370
3371 bool headerIsBundle = hi->isBundle();
3372 bool headerIsPreoptimized = hi->hasPreoptimizedClasses();
3373
3374 for (i = 0; i < count; i++) {
3375 Class cls = (Class)classlist[i];
3376 Class newCls = readClass(cls, headerIsBundle, headerIsPreoptimized);
3377
3378 if (newCls != cls && newCls) {
3379 // Class was moved but not deleted. Currently this occurs
3380 // only when the new class resolved a future class.
3381 // Non-lazily realize the class below.
3382 resolvedFutureClasses = (Class *)
3383 realloc(resolvedFutureClasses,
3384 (resolvedFutureClassCount+1) * sizeof(Class));
3385 resolvedFutureClasses[resolvedFutureClassCount++] = newCls;
3386 }
3387 }
3388 }
3389
3390 ts.log("IMAGE TIMES: discover classes");
3391
3392 // Fix up remapped classes
3393 // Class list and nonlazy class list remain unremapped.
3394 // Class refs and super refs are remapped for message dispatching.
3395
3396 if (!noClassesRemapped()) {
3397 for (EACH_HEADER) {
3398 Class *classrefs = _getObjc2ClassRefs(hi, &count);
3399 for (i = 0; i < count; i++) {
3400 remapClassRef(&classrefs[i]);
3401 }
3402 // fixme why doesn't test future1 catch the absence of this?
3403 classrefs = _getObjc2SuperRefs(hi, &count);
3404 for (i = 0; i < count; i++) {
3405 remapClassRef(&classrefs[i]);
3406 }
3407 }
3408 }
3409
3410 ts.log("IMAGE TIMES: remap classes");
3411
3412 #if SUPPORT_FIXUP
3413 // Fix up old objc_msgSend_fixup call sites
3414 for (EACH_HEADER) {
3415 message_ref_t *refs = _getObjc2MessageRefs(hi, &count);
3416 if (count == 0) continue;
3417
3418 if (PrintVtables) {
3419 _objc_inform("VTABLES: repairing %zu unsupported vtable dispatch "
3420 "call sites in %s", count, hi->fname());
3421 }
3422 for (i = 0; i < count; i++) {
3423 fixupMessageRef(refs+i);
3424 }
3425 }
3426
3427 ts.log("IMAGE TIMES: fix up objc_msgSend_fixup");
3428 #endif
3429
3430 bool cacheSupportsProtocolRoots = sharedCacheSupportsProtocolRoots();
3431
3432 // Discover protocols. Fix up protocol refs.
3433 for (EACH_HEADER) {
3434 extern objc_class OBJC_CLASS_$_Protocol;
3435 Class cls = (Class)&OBJC_CLASS_$_Protocol;
3436 ASSERT(cls);
3437 NXMapTable *protocol_map = protocols();
3438 bool isPreoptimized = hi->hasPreoptimizedProtocols();
3439
3440 // Skip reading protocols if this is an image from the shared cache
3441 // and we support roots.
3442 // Note: after launch we do need to walk the protocol list, because a
3443 // protocol in the shared cache is marked with isCanonical() and that may
3444 // not be true if some non-shared-cache binary was chosen as the canonical
3445 // definition.
3446 if (launchTime && isPreoptimized && cacheSupportsProtocolRoots) {
3447 if (PrintProtocols) {
3448 _objc_inform("PROTOCOLS: Skipping reading protocols in image: %s",
3449 hi->fname());
3450 }
3451 continue;
3452 }
3453
3454 bool isBundle = hi->isBundle();
3455
3456 protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count);
3457 for (i = 0; i < count; i++) {
3458 readProtocol(protolist[i], cls, protocol_map,
3459 isPreoptimized, isBundle);
3460 }
3461 }
3462
3463 ts.log("IMAGE TIMES: discover protocols");
3464
3465 // Fix up @protocol references
3466 // Preoptimized images may have the right
3467 // answer already but we don't know for sure.
3468 for (EACH_HEADER) {
3469 // At launch time, we know preoptimized image refs are pointing at the
3470 // shared cache definition of a protocol. We can skip the check on
3471 // launch, but have to visit @protocol refs for shared cache images
3472 // loaded later.
3473 if (launchTime && cacheSupportsProtocolRoots && hi->isPreoptimized())
3474 continue;
3475 protocol_t **protolist = _getObjc2ProtocolRefs(hi, &count);
3476 for (i = 0; i < count; i++) {
3477 remapProtocolRef(&protolist[i]);
3478 }
3479 }
3480
3481 ts.log("IMAGE TIMES: fix up @protocol references");
3482
3483 // Discover categories.
3484 for (EACH_HEADER) {
3485 bool hasClassProperties = hi->info()->hasCategoryClassProperties();
3486
3487 auto processCatlist = [&](category_t * const *catlist) {
3488 for (i = 0; i < count; i++) {
3489 category_t *cat = catlist[i];
3490 Class cls = remapClass(cat->cls);
3491 locstamped_category_t lc{cat, hi};
3492
3493 if (!cls) {
3494 // Category's target class is missing (probably weak-linked).
3495 // Ignore the category.
3496 if (PrintConnecting) {
3497 _objc_inform("CLASS: IGNORING category \?\?\?(%s) %p with "
3498 "missing weak-linked target class",
3499 cat->name, cat);
3500 }
3501 continue;
3502 }
3503
3504 // Process this category.
3505 if (cls->isStubClass()) {
3506 // Stub classes are never realized. Stub classes
3507 // don't know their metaclass until they're
3508 // initialized, so we have to add categories with
3509 // class methods or properties to the stub itself.
3510 // methodizeClass() will find them and add them to
3511 // the metaclass as appropriate.
3512 if (cat->instanceMethods ||
3513 cat->protocols ||
3514 cat->instanceProperties ||
3515 cat->classMethods ||
3516 cat->protocols ||
3517 (hasClassProperties && cat->_classProperties))
3518 {
3519 objc::unattachedCategories.addForClass(lc, cls);
3520 }
3521 } else {
3522 // First, register the category with its target class.
3523 // Then, rebuild the class's method lists (etc) if
3524 // the class is realized.
3525 if (cat->instanceMethods || cat->protocols
3526 || cat->instanceProperties)
3527 {
3528 if (cls->isRealized()) {
3529 attachCategories(cls, &lc, 1, ATTACH_EXISTING);
3530 } else {
3531 objc::unattachedCategories.addForClass(lc, cls);
3532 }
3533 }
3534
3535 if (cat->classMethods || cat->protocols
3536 || (hasClassProperties && cat->_classProperties))
3537 {
3538 if (cls->ISA()->isRealized()) {
3539 attachCategories(cls->ISA(), &lc, 1, ATTACH_EXISTING | ATTACH_METACLASS);
3540 } else {
3541 objc::unattachedCategories.addForClass(lc, cls->ISA());
3542 }
3543 }
3544 }
3545 }
3546 };
3547 processCatlist(_getObjc2CategoryList(hi, &count));
3548 processCatlist(_getObjc2CategoryList2(hi, &count));
3549 }
3550
3551 ts.log("IMAGE TIMES: discover categories");
3552
3553 // Category discovery MUST BE LATE to avoid potential races
3554 // when other threads call the new category code before
3555 // this thread finishes its fixups.
3556
3557 // +load handled by prepare_load_methods()
3558
3559 // Realize non-lazy classes (for +load methods and static instances)
3560 for (EACH_HEADER) {
3561 classref_t const *classlist =
3562 _getObjc2NonlazyClassList(hi, &count);
3563 for (i = 0; i < count; i++) {
3564 Class cls = remapClass(classlist[i]);
3565 if (!cls) continue;
3566
3567 addClassTableEntry(cls);
3568
3569 if (cls->isSwiftStable()) {
3570 if (cls->swiftMetadataInitializer()) {
3571 _objc_fatal("Swift class %s with a metadata initializer "
3572 "is not allowed to be non-lazy",
3573 cls->nameForLogging());
3574 }
3575 // fixme also disallow relocatable classes
3576 // We can't disallow all Swift classes because of
3577 // classes like Swift.__EmptyArrayStorage
3578 }
3579 realizeClassWithoutSwift(cls, nil);
3580 }
3581 }
3582
3583 ts.log("IMAGE TIMES: realize non-lazy classes");
3584
3585 // Realize newly-resolved future classes, in case CF manipulates them
3586 if (resolvedFutureClasses) {
3587 for (i = 0; i < resolvedFutureClassCount; i++) {
3588 Class cls = resolvedFutureClasses[i];
3589 if (cls->isSwiftStable()) {
3590 _objc_fatal("Swift class is not allowed to be future");
3591 }
3592 realizeClassWithoutSwift(cls, nil);
3593 cls->setInstancesRequireRawIsaRecursively(false/*inherited*/);
3594 }
3595 free(resolvedFutureClasses);
3596 }
3597
3598 ts.log("IMAGE TIMES: realize future classes");
3599
3600 if (DebugNonFragileIvars) {
3601 realizeAllClasses();
3602 }
3603
3604
3605 // Print preoptimization statistics
3606 if (PrintPreopt) {
3607 static unsigned int PreoptTotalMethodLists;
3608 static unsigned int PreoptOptimizedMethodLists;
3609 static unsigned int PreoptTotalClasses;
3610 static unsigned int PreoptOptimizedClasses;
3611
3612 for (EACH_HEADER) {
3613 if (hi->hasPreoptimizedSelectors()) {
3614 _objc_inform("PREOPTIMIZATION: honoring preoptimized selectors "
3615 "in %s", hi->fname());
3616 }
3617 else if (hi->info()->optimizedByDyld()) {
3618 _objc_inform("PREOPTIMIZATION: IGNORING preoptimized selectors "
3619 "in %s", hi->fname());
3620 }
3621
3622 classref_t const *classlist = _getObjc2ClassList(hi, &count);
3623 for (i = 0; i < count; i++) {
3624 Class cls = remapClass(classlist[i]);
3625 if (!cls) continue;
3626
3627 PreoptTotalClasses++;
3628 if (hi->hasPreoptimizedClasses()) {
3629 PreoptOptimizedClasses++;
3630 }
3631
3632 const method_list_t *mlist;
3633 if ((mlist = ((class_ro_t *)cls->data())->baseMethods())) {
3634 PreoptTotalMethodLists++;
3635 if (mlist->isFixedUp()) {
3636 PreoptOptimizedMethodLists++;
3637 }
3638 }
3639 if ((mlist=((class_ro_t *)cls->ISA()->data())->baseMethods())) {
3640 PreoptTotalMethodLists++;
3641 if (mlist->isFixedUp()) {
3642 PreoptOptimizedMethodLists++;
3643 }
3644 }
3645 }
3646 }
3647
3648 _objc_inform("PREOPTIMIZATION: %zu selector references not "
3649 "pre-optimized", UnfixedSelectors);
3650 _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) method lists pre-sorted",
3651 PreoptOptimizedMethodLists, PreoptTotalMethodLists,
3652 PreoptTotalMethodLists
3653 ? 100.0*PreoptOptimizedMethodLists/PreoptTotalMethodLists
3654 : 0.0);
3655 _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) classes pre-registered",
3656 PreoptOptimizedClasses, PreoptTotalClasses,
3657 PreoptTotalClasses
3658 ? 100.0*PreoptOptimizedClasses/PreoptTotalClasses
3659 : 0.0);
3660 _objc_inform("PREOPTIMIZATION: %zu protocol references not "
3661 "pre-optimized", UnfixedProtocolReferences);
3662 }
3663
3664 #undef EACH_HEADER
3665 }
3666
3667
3668 /***********************************************************************
3669 * prepare_load_methods
3670 * Schedule +load for classes in this image, any un-+load-ed
3671 * superclasses in other images, and any categories in this image.
3672 **********************************************************************/
3673 // Recursively schedule +load for cls and any un-+load-ed superclasses.
3674 // cls must already be connected.
3675 static void schedule_class_load(Class cls)
3676 {
3677 if (!cls) return;
3678 ASSERT(cls->isRealized()); // _read_images should realize
3679
3680 if (cls->data()->flags & RW_LOADED) return;
3681
3682 // Ensure superclass-first ordering
3683 schedule_class_load(cls->superclass);
3684
3685 add_class_to_loadable_list(cls);
3686 cls->setInfo(RW_LOADED);
3687 }
3688
3689 // Quick scan for +load methods; does not take any locks.
3690 bool hasLoadMethods(const headerType *mhdr)
3691 {
3692 size_t count;
3693 if (_getObjc2NonlazyClassList(mhdr, &count) && count > 0) return true;
3694 if (_getObjc2NonlazyCategoryList(mhdr, &count) && count > 0) return true;
3695 return false;
3696 }
3697
3698 void prepare_load_methods(const headerType *mhdr)
3699 {
3700 size_t count, i;
3701
3702 runtimeLock.assertLocked();
3703
3704 classref_t const *classlist =
3705 _getObjc2NonlazyClassList(mhdr, &count);
3706 for (i = 0; i < count; i++) {
3707 schedule_class_load(remapClass(classlist[i]));
3708 }
3709
3710 category_t * const *categorylist = _getObjc2NonlazyCategoryList(mhdr, &count);
3711 for (i = 0; i < count; i++) {
3712 category_t *cat = categorylist[i];
3713 Class cls = remapClass(cat->cls);
3714 if (!cls) continue; // category for ignored weak-linked class
3715 if (cls->isSwiftStable()) {
3716 _objc_fatal("Swift class extensions and categories on Swift "
3717 "classes are not allowed to have +load methods");
3718 }
3719 realizeClassWithoutSwift(cls, nil);
3720 ASSERT(cls->ISA()->isRealized());
3721 add_category_to_loadable_list(cat);
3722 }
3723 }
3724
3725
3726 /***********************************************************************
3727 * _unload_image
3728 * Only handles MH_BUNDLE for now.
3729 * Locking: write-lock and loadMethodLock acquired by unmap_image
3730 **********************************************************************/
3731 void _unload_image(header_info *hi)
3732 {
3733 size_t count, i;
3734
3735 loadMethodLock.assertLocked();
3736 runtimeLock.assertLocked();
3737
3738 // Unload unattached categories and categories waiting for +load.
3739
3740 // Ignore __objc_catlist2. We don't support unloading Swift
3741 // and we never will.
3742 category_t * const *catlist = _getObjc2CategoryList(hi, &count);
3743 for (i = 0; i < count; i++) {
3744 category_t *cat = catlist[i];
3745 Class cls = remapClass(cat->cls);
3746 if (!cls) continue; // category for ignored weak-linked class
3747
3748 // fixme for MH_DYLIB cat's class may have been unloaded already
3749
3750 // unattached list
3751 objc::unattachedCategories.eraseCategoryForClass(cat, cls);
3752
3753 // +load queue
3754 remove_category_from_loadable_list(cat);
3755 }
3756
3757 // Unload classes.
3758
3759 // Gather classes from both __DATA,__objc_clslist
3760 // and __DATA,__objc_nlclslist. arclite's hack puts a class in the latter
3761 // only, and we need to unload that class if we unload an arclite image.
3762
3763 objc::DenseSet<Class> classes{};
3764 classref_t const *classlist;
3765
3766 classlist = _getObjc2ClassList(hi, &count);
3767 for (i = 0; i < count; i++) {
3768 Class cls = remapClass(classlist[i]);
3769 if (cls) classes.insert(cls);
3770 }
3771
3772 classlist = _getObjc2NonlazyClassList(hi, &count);
3773 for (i = 0; i < count; i++) {
3774 Class cls = remapClass(classlist[i]);
3775 if (cls) classes.insert(cls);
3776 }
3777
3778 // First detach classes from each other. Then free each class.
3779 // This avoid bugs where this loop unloads a subclass before its superclass
3780
3781 for (Class cls: classes) {
3782 remove_class_from_loadable_list(cls);
3783 detach_class(cls->ISA(), YES);
3784 detach_class(cls, NO);
3785 }
3786 for (Class cls: classes) {
3787 free_class(cls->ISA());
3788 free_class(cls);
3789 }
3790
3791 // XXX FIXME -- Clean up protocols:
3792 // <rdar://problem/9033191> Support unloading protocols at dylib/image unload time
3793
3794 // fixme DebugUnload
3795 }
3796
3797
3798 /***********************************************************************
3799 * method_getDescription
3800 * Returns a pointer to this method's objc_method_description.
3801 * Locking: none
3802 **********************************************************************/
3803 struct objc_method_description *
3804 method_getDescription(Method m)
3805 {
3806 if (!m) return nil;
3807 return (struct objc_method_description *)m;
3808 }
3809
3810
3811 IMP
3812 method_getImplementation(Method m)
3813 {
3814 return m ? m->imp : nil;
3815 }
3816
3817
3818 /***********************************************************************
3819 * method_getName
3820 * Returns this method's selector.
3821 * The method must not be nil.
3822 * The method must already have been fixed-up.
3823 * Locking: none
3824 **********************************************************************/
3825 SEL
3826 method_getName(Method m)
3827 {
3828 if (!m) return nil;
3829
3830 ASSERT(m->name == sel_registerName(sel_getName(m->name)));
3831 return m->name;
3832 }
3833
3834
3835 /***********************************************************************
3836 * method_getTypeEncoding
3837 * Returns this method's old-style type encoding string.
3838 * The method must not be nil.
3839 * Locking: none
3840 **********************************************************************/
3841 const char *
3842 method_getTypeEncoding(Method m)
3843 {
3844 if (!m) return nil;
3845 return m->types;
3846 }
3847
3848
3849 /***********************************************************************
3850 * method_setImplementation
3851 * Sets this method's implementation to imp.
3852 * The previous implementation is returned.
3853 **********************************************************************/
3854 static IMP
3855 _method_setImplementation(Class cls, method_t *m, IMP imp)
3856 {
3857 runtimeLock.assertLocked();
3858
3859 if (!m) return nil;
3860 if (!imp) return nil;
3861
3862 IMP old = m->imp;
3863 m->imp = imp;
3864
3865 // Cache updates are slow if cls is nil (i.e. unknown)
3866 // RR/AWZ updates are slow if cls is nil (i.e. unknown)
3867 // fixme build list of classes whose Methods are known externally?
3868
3869 flushCaches(cls);
3870
3871 adjustCustomFlagsForMethodChange(cls, m);
3872
3873 return old;
3874 }
3875
3876 IMP
3877 method_setImplementation(Method m, IMP imp)
3878 {
3879 // Don't know the class - will be slow if RR/AWZ are affected
3880 // fixme build list of classes whose Methods are known externally?
3881 mutex_locker_t lock(runtimeLock);
3882 return _method_setImplementation(Nil, m, imp);
3883 }
3884
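// Usage sketch (illustrative; the class and selector names are hypothetical,
// and this code is not exercised by the runtime itself). It shows the public
// <objc/runtime.h> entry point above replacing one method's IMP and keeping
// the previous IMP so the caller could chain to it.
#if 0
static id ReplacementDescription(id self, SEL _cmd)
{
    return nil;  // stand-in behavior for the example
}

static void ExampleSetImplementation(void)
{
    Class cls = objc_getClass("MyWidget");  // hypothetical class
    Method m = class_getInstanceMethod(cls, sel_registerName("description"));
    if (m) {
        IMP previous = method_setImplementation(m, (IMP)ReplacementDescription);
        (void)previous;  // the previous IMP could be stored and called later
    }
}
#endif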
3885
3886 void method_exchangeImplementations(Method m1, Method m2)
3887 {
3888 if (!m1 || !m2) return;
3889
3890 mutex_locker_t lock(runtimeLock);
3891
3892 IMP m1_imp = m1->imp;
3893 m1->imp = m2->imp;
3894 m2->imp = m1_imp;
3895
3896
3897 // RR/AWZ updates are slow because class is unknown
3898 // Cache updates are slow because class is unknown
3899 // fixme build list of classes whose Methods are known externally?
3900
3901 flushCaches(nil);
3902
3903 adjustCustomFlagsForMethodChange(nil, m1);
3904 adjustCustomFlagsForMethodChange(nil, m2);
3905 }
3906
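// Usage sketch (illustrative; the class and selector names are hypothetical).
// Classic method swizzling: both selectors must already resolve to Methods on
// the class before their IMPs are exchanged under runtimeLock.
#if 0
static void ExampleSwizzle(void)
{
    Class cls = objc_getClass("MyWidget");  // hypothetical class
    Method orig = class_getInstanceMethod(cls, sel_registerName("refresh"));
    Method repl = class_getInstanceMethod(cls, sel_registerName("xxx_refresh"));
    if (orig && repl) {
        method_exchangeImplementations(orig, repl);  // swaps IMPs, flushes caches
    }
}
#endif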
3907
3908 /***********************************************************************
3909 * ivar_getOffset
3910 * Returns the offset of an ivar relative to the start of an instance.
3911 * Locking: none
3912 **********************************************************************/
3913 ptrdiff_t
3914 ivar_getOffset(Ivar ivar)
3915 {
3916 if (!ivar) return 0;
3917 return *ivar->offset;
3918 }
3919
3920
3921 /***********************************************************************
3922 * ivar_getName
3923 * Returns an ivar's name.
3924 * Locking: none
3925 **********************************************************************/
3926 const char *
3927 ivar_getName(Ivar ivar)
3928 {
3929 if (!ivar) return nil;
3930 return ivar->name;
3931 }
3932
3933
3934 /***********************************************************************
3935 * ivar_getTypeEncoding
3936 * Returns an ivar's type encoding string.
3937 * Locking: none
3938 **********************************************************************/
3939 const char *
3940 ivar_getTypeEncoding(Ivar ivar)
3941 {
3942 if (!ivar) return nil;
3943 return ivar->type;
3944 }
3945
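// Usage sketch (illustrative, not exercised by the runtime): walking a class's
// ivar list with the public API and printing each ivar's name, type encoding,
// and offset.
#if 0
static void ExampleDumpIvars(Class cls)
{
    unsigned int count = 0;
    Ivar *ivars = class_copyIvarList(cls, &count);
    for (unsigned int i = 0; i < count; i++) {
        printf("%s : %s at offset %td\n",
               ivar_getName(ivars[i]),
               ivar_getTypeEncoding(ivars[i]),
               ivar_getOffset(ivars[i]));
    }
    free(ivars);  // caller owns the returned buffer
}
#endif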
3946
3947
3948 const char *property_getName(objc_property_t prop)
3949 {
3950 return prop->name;
3951 }
3952
3953 const char *property_getAttributes(objc_property_t prop)
3954 {
3955 return prop->attributes;
3956 }
3957
3958 objc_property_attribute_t *property_copyAttributeList(objc_property_t prop,
3959 unsigned int *outCount)
3960 {
3961 if (!prop) {
3962 if (outCount) *outCount = 0;
3963 return nil;
3964 }
3965
3966 mutex_locker_t lock(runtimeLock);
3967 return copyPropertyAttributeList(prop->attributes,outCount);
3968 }
3969
3970 char * property_copyAttributeValue(objc_property_t prop, const char *name)
3971 {
3972 if (!prop || !name || *name == '\0') return nil;
3973
3974 mutex_locker_t lock(runtimeLock);
3975 return copyPropertyAttributeValue(prop->attributes, name);
3976 }
3977
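// Usage sketch (illustrative; the class and property names are hypothetical):
// reading a property's full attribute string and a single attribute value.
#if 0
static void ExampleInspectProperty(void)
{
    objc_property_t prop = class_getProperty(objc_getClass("MyWidget"), "title");
    if (!prop) return;

    printf("attributes: %s\n", property_getAttributes(prop));

    // "T" selects the type attribute; the returned copy must be freed.
    char *type = property_copyAttributeValue(prop, "T");
    if (type) {
        printf("type encoding: %s\n", type);
        free(type);
    }
}
#endif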
3978
3979 /***********************************************************************
3980 * getExtendedTypesIndexesForMethod
3981 * Returns:
3982 * a is the count of methods in all method lists before m's method list
3983 * b is the index of m in m's method list
3984 * a+b is the index of m's extended types in the extended types array
3985 **********************************************************************/
3986 static void getExtendedTypesIndexesForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod, uint32_t& a, uint32_t &b)
3987 {
3988 a = 0;
3989
3990 if (proto->instanceMethods) {
3991 if (isRequiredMethod && isInstanceMethod) {
3992 b = proto->instanceMethods->indexOfMethod(m);
3993 return;
3994 }
3995 a += proto->instanceMethods->count;
3996 }
3997
3998 if (proto->classMethods) {
3999 if (isRequiredMethod && !isInstanceMethod) {
4000 b = proto->classMethods->indexOfMethod(m);
4001 return;
4002 }
4003 a += proto->classMethods->count;
4004 }
4005
4006 if (proto->optionalInstanceMethods) {
4007 if (!isRequiredMethod && isInstanceMethod) {
4008 b = proto->optionalInstanceMethods->indexOfMethod(m);
4009 return;
4010 }
4011 a += proto->optionalInstanceMethods->count;
4012 }
4013
4014 if (proto->optionalClassMethods) {
4015 if (!isRequiredMethod && !isInstanceMethod) {
4016 b = proto->optionalClassMethods->indexOfMethod(m);
4017 return;
4018 }
4019 a += proto->optionalClassMethods->count;
4020 }
4021 }
4022
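// Worked example (illustrative): for a protocol with 3 required instance
// methods and 2 required class methods, the first optional instance method
// gets a == 5 and b == 0, so its extended type string is found at
// extendedMethodTypes()[5].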
4023
4024 /***********************************************************************
4025 * getExtendedTypesIndexForMethod
4026 * Returns the index of m's extended types in proto's extended types array.
4027 **********************************************************************/
4028 static uint32_t getExtendedTypesIndexForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod)
4029 {
4030 uint32_t a;
4031 uint32_t b;
4032 getExtendedTypesIndexesForMethod(proto, m, isRequiredMethod,
4033 isInstanceMethod, a, b);
4034 return a + b;
4035 }
4036
4037
4038 /***********************************************************************
4039 * fixupProtocolMethodList
4040 * Fixes up a single method list in a protocol.
4041 **********************************************************************/
4042 static void
4043 fixupProtocolMethodList(protocol_t *proto, method_list_t *mlist,
4044 bool required, bool instance)
4045 {
4046 runtimeLock.assertLocked();
4047
4048 if (!mlist) return;
4049 if (mlist->isFixedUp()) return;
4050
4051 const char **extTypes = proto->extendedMethodTypes();
4052 fixupMethodList(mlist, true/*always copy for simplicity*/,
4053 !extTypes/*sort if no extended method types*/);
4054
4055 if (extTypes) {
4056 // Sort method list and extended method types together.
4057 // fixupMethodList() can't do this.
4058 // fixme COW stomp
4059 uint32_t count = mlist->count;
4060 uint32_t prefix;
4061 uint32_t junk;
4062 getExtendedTypesIndexesForMethod(proto, &mlist->get(0),
4063 required, instance, prefix, junk);
4064 for (uint32_t i = 0; i < count; i++) {
4065 for (uint32_t j = i+1; j < count; j++) {
4066 method_t& mi = mlist->get(i);
4067 method_t& mj = mlist->get(j);
4068 if (mi.name > mj.name) {
4069 std::swap(mi, mj);
4070 std::swap(extTypes[prefix+i], extTypes[prefix+j]);
4071 }
4072 }
4073 }
4074 }
4075 }
4076
4077
4078 /***********************************************************************
4079 * fixupProtocol
4080 * Fixes up all of a protocol's method lists.
4081 **********************************************************************/
4082 static void
4083 fixupProtocol(protocol_t *proto)
4084 {
4085 runtimeLock.assertLocked();
4086
4087 if (proto->protocols) {
4088 for (uintptr_t i = 0; i < proto->protocols->count; i++) {
4089 protocol_t *sub = remapProtocol(proto->protocols->list[i]);
4090 if (!sub->isFixedUp()) fixupProtocol(sub);
4091 }
4092 }
4093
4094 fixupProtocolMethodList(proto, proto->instanceMethods, YES, YES);
4095 fixupProtocolMethodList(proto, proto->classMethods, YES, NO);
4096 fixupProtocolMethodList(proto, proto->optionalInstanceMethods, NO, YES);
4097 fixupProtocolMethodList(proto, proto->optionalClassMethods, NO, NO);
4098
4099 // fixme memory barrier so we can check this with no lock
4100 proto->setFixedUp();
4101 }
4102
4103
4104 /***********************************************************************
4105 * fixupProtocolIfNeeded
4106 * Fixes up all of a protocol's method lists if they aren't fixed up already.
4107 * Locking: write-locks runtimeLock.
4108 **********************************************************************/
4109 static void
4110 fixupProtocolIfNeeded(protocol_t *proto)
4111 {
4112 runtimeLock.assertUnlocked();
4113 ASSERT(proto);
4114
4115 if (!proto->isFixedUp()) {
4116 mutex_locker_t lock(runtimeLock);
4117 fixupProtocol(proto);
4118 }
4119 }
4120
4121
4122 static method_list_t *
4123 getProtocolMethodList(protocol_t *proto, bool required, bool instance)
4124 {
4125 method_list_t **mlistp = nil;
4126 if (required) {
4127 if (instance) {
4128 mlistp = &proto->instanceMethods;
4129 } else {
4130 mlistp = &proto->classMethods;
4131 }
4132 } else {
4133 if (instance) {
4134 mlistp = &proto->optionalInstanceMethods;
4135 } else {
4136 mlistp = &proto->optionalClassMethods;
4137 }
4138 }
4139
4140 return *mlistp;
4141 }
4142
4143
4144 /***********************************************************************
4145 * protocol_getMethod_nolock
4146 * Locking: runtimeLock must be held by the caller
4147 **********************************************************************/
4148 static method_t *
4149 protocol_getMethod_nolock(protocol_t *proto, SEL sel,
4150 bool isRequiredMethod, bool isInstanceMethod,
4151 bool recursive)
4152 {
4153 runtimeLock.assertLocked();
4154
4155 if (!proto || !sel) return nil;
4156
4157 ASSERT(proto->isFixedUp());
4158
4159 method_list_t *mlist =
4160 getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
4161 if (mlist) {
4162 method_t *m = search_method_list(mlist, sel);
4163 if (m) return m;
4164 }
4165
4166 if (recursive && proto->protocols) {
4167 method_t *m;
4168 for (uint32_t i = 0; i < proto->protocols->count; i++) {
4169 protocol_t *realProto = remapProtocol(proto->protocols->list[i]);
4170 m = protocol_getMethod_nolock(realProto, sel,
4171 isRequiredMethod, isInstanceMethod,
4172 true);
4173 if (m) return m;
4174 }
4175 }
4176
4177 return nil;
4178 }
4179
4180
4181 /***********************************************************************
4182 * protocol_getMethod
4183 * Returns proto's method for sel, optionally searching incorporated protocols.
4184 * Locking: acquires runtimeLock
4185 **********************************************************************/
4186 Method
4187 protocol_getMethod(protocol_t *proto, SEL sel, bool isRequiredMethod, bool isInstanceMethod, bool recursive)
4188 {
4189 if (!proto) return nil;
4190 fixupProtocolIfNeeded(proto);
4191
4192 mutex_locker_t lock(runtimeLock);
4193 return protocol_getMethod_nolock(proto, sel, isRequiredMethod,
4194 isInstanceMethod, recursive);
4195 }
4196
4197
4198 /***********************************************************************
4199 * protocol_getMethodTypeEncoding_nolock
4200 * Return the @encode string for the requested protocol method.
4201 * Returns nil if the compiler did not emit any extended @encode data.
4202 * Locking: runtimeLock must be held by the caller
4203 **********************************************************************/
4204 const char *
4205 protocol_getMethodTypeEncoding_nolock(protocol_t *proto, SEL sel,
4206 bool isRequiredMethod,
4207 bool isInstanceMethod)
4208 {
4209 runtimeLock.assertLocked();
4210
4211 if (!proto) return nil;
4212 if (!proto->extendedMethodTypes()) return nil;
4213
4214 ASSERT(proto->isFixedUp());
4215
4216 method_t *m =
4217 protocol_getMethod_nolock(proto, sel,
4218 isRequiredMethod, isInstanceMethod, false);
4219 if (m) {
4220 uint32_t i = getExtendedTypesIndexForMethod(proto, m,
4221 isRequiredMethod,
4222 isInstanceMethod);
4223 return proto->extendedMethodTypes()[i];
4224 }
4225
4226 // No method with that name. Search incorporated protocols.
4227 if (proto->protocols) {
4228 for (uintptr_t i = 0; i < proto->protocols->count; i++) {
4229 const char *enc =
4230 protocol_getMethodTypeEncoding_nolock(remapProtocol(proto->protocols->list[i]), sel, isRequiredMethod, isInstanceMethod);
4231 if (enc) return enc;
4232 }
4233 }
4234
4235 return nil;
4236 }
4237
4238 /***********************************************************************
4239 * _protocol_getMethodTypeEncoding
4240 * Return the @encode string for the requested protocol method.
4241 * Returns nil if the compiler did not emit any extended @encode data.
4242 * Locking: acquires runtimeLock
4243 **********************************************************************/
4244 const char *
4245 _protocol_getMethodTypeEncoding(Protocol *proto_gen, SEL sel,
4246 BOOL isRequiredMethod, BOOL isInstanceMethod)
4247 {
4248 protocol_t *proto = newprotocol(proto_gen);
4249
4250 if (!proto) return nil;
4251 fixupProtocolIfNeeded(proto);
4252
4253 mutex_locker_t lock(runtimeLock);
4254 return protocol_getMethodTypeEncoding_nolock(proto, sel,
4255 isRequiredMethod,
4256 isInstanceMethod);
4257 }
4258
4259
4260 /***********************************************************************
4261 * protocol_t::demangledName
4262 * Returns the (Swift-demangled) name of the given protocol.
4263 * Locking: none
4264 **********************************************************************/
4265 const char *
4266 protocol_t::demangledName()
4267 {
4268 ASSERT(hasDemangledNameField());
4269
4270 if (! _demangledName) {
4271 char *de = copySwiftV1DemangledName(mangledName, true/*isProtocol*/);
4272 if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangledName),
4273 (void**)&_demangledName))
4274 {
4275 if (de) free(de);
4276 }
4277 }
4278 return _demangledName;
4279 }
4280
4281 /***********************************************************************
4282 * protocol_getName
4283 * Returns the (Swift-demangled) name of the given protocol.
4284 * Locking: runtimeLock must not be held by the caller
4285 **********************************************************************/
4286 const char *
4287 protocol_getName(Protocol *proto)
4288 {
4289 if (!proto) return "nil";
4290 else return newprotocol(proto)->demangledName();
4291 }
4292
4293
4294 /***********************************************************************
4295 * protocol_getMethodDescription
4296 * Returns the objc_method_description for the named protocol method.
4297 * Locking: runtimeLock must not be held by the caller
4298 **********************************************************************/
4299 struct objc_method_description
4300 protocol_getMethodDescription(Protocol *p, SEL aSel,
4301 BOOL isRequiredMethod, BOOL isInstanceMethod)
4302 {
4303 Method m =
4304 protocol_getMethod(newprotocol(p), aSel,
4305 isRequiredMethod, isInstanceMethod, true);
4306 if (m) return *method_getDescription(m);
4307 else return (struct objc_method_description){nil, nil};
4308 }
4309
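// Usage sketch (illustrative): fetching the description of a required instance
// method from a protocol already registered with the runtime. Assumes the
// NSObject protocol has been loaded.
#if 0
static void ExampleProtocolMethodDescription(void)
{
    Protocol *proto = objc_getProtocol("NSObject");
    struct objc_method_description desc =
        protocol_getMethodDescription(proto, sel_registerName("description"),
                                      YES/*required*/, YES/*instance*/);
    if (desc.name) {
        printf("%s has type encoding %s\n", sel_getName(desc.name), desc.types);
    }
}
#endif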
4310
4311 /***********************************************************************
4312 * protocol_conformsToProtocol_nolock
4313 * Returns YES if self conforms to other.
4314 * Locking: runtimeLock must be held by the caller.
4315 **********************************************************************/
4316 static bool
4317 protocol_conformsToProtocol_nolock(protocol_t *self, protocol_t *other)
4318 {
4319 runtimeLock.assertLocked();
4320
4321 if (!self || !other) {
4322 return NO;
4323 }
4324
4325 // protocols need not be fixed up
4326
4327 if (0 == strcmp(self->mangledName, other->mangledName)) {
4328 return YES;
4329 }
4330
4331 if (self->protocols) {
4332 uintptr_t i;
4333 for (i = 0; i < self->protocols->count; i++) {
4334 protocol_t *proto = remapProtocol(self->protocols->list[i]);
4335 if (other == proto) {
4336 return YES;
4337 }
4338 if (0 == strcmp(other->mangledName, proto->mangledName)) {
4339 return YES;
4340 }
4341 if (protocol_conformsToProtocol_nolock(proto, other)) {
4342 return YES;
4343 }
4344 }
4345 }
4346
4347 return NO;
4348 }
4349
4350
4351 /***********************************************************************
4352 * protocol_conformsToProtocol
4353 * Returns YES if self conforms to other.
4354 * Locking: acquires runtimeLock
4355 **********************************************************************/
4356 BOOL protocol_conformsToProtocol(Protocol *self, Protocol *other)
4357 {
4358 mutex_locker_t lock(runtimeLock);
4359 return protocol_conformsToProtocol_nolock(newprotocol(self),
4360 newprotocol(other));
4361 }
4362
4363
4364 /***********************************************************************
4365 * protocol_isEqual
4366 * Returns YES if two protocols are equal (i.e. they conform to each other).
4367 * Locking: acquires runtimeLock
4368 **********************************************************************/
4369 BOOL protocol_isEqual(Protocol *self, Protocol *other)
4370 {
4371 if (self == other) return YES;
4372 if (!self || !other) return NO;
4373
4374 if (!protocol_conformsToProtocol(self, other)) return NO;
4375 if (!protocol_conformsToProtocol(other, self)) return NO;
4376
4377 return YES;
4378 }
4379
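// Usage sketch (illustrative): conformance and equality checks on registered
// protocols. A protocol always conforms to itself, so both calls return YES.
#if 0
static void ExampleProtocolChecks(void)
{
    Protocol *copying = objc_getProtocol("NSCopying");  // assumed to be loaded
    if (copying) {
        BOOL conforms = protocol_conformsToProtocol(copying, copying);  // YES
        BOOL equal    = protocol_isEqual(copying, copying);             // YES
        (void)conforms; (void)equal;
    }
}
#endif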
4380
4381 /***********************************************************************
4382 * protocol_copyMethodDescriptionList
4383 * Returns descriptions of a protocol's methods.
4384 * Locking: acquires runtimeLock
4385 **********************************************************************/
4386 struct objc_method_description *
4387 protocol_copyMethodDescriptionList(Protocol *p,
4388 BOOL isRequiredMethod,BOOL isInstanceMethod,
4389 unsigned int *outCount)
4390 {
4391 protocol_t *proto = newprotocol(p);
4392 struct objc_method_description *result = nil;
4393 unsigned int count = 0;
4394
4395 if (!proto) {
4396 if (outCount) *outCount = 0;
4397 return nil;
4398 }
4399
4400 fixupProtocolIfNeeded(proto);
4401
4402 mutex_locker_t lock(runtimeLock);
4403
4404 method_list_t *mlist =
4405 getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
4406
4407 if (mlist) {
4408 result = (struct objc_method_description *)
4409 calloc(mlist->count + 1, sizeof(struct objc_method_description));
4410 for (const auto& meth : *mlist) {
4411 result[count].name = meth.name;
4412 result[count].types = (char *)meth.types;
4413 count++;
4414 }
4415 }
4416
4417 if (outCount) *outCount = count;
4418 return result;
4419 }
4420
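// Usage sketch (illustrative): enumerating a protocol's required instance
// method descriptions. The returned buffer is owned by the caller.
#if 0
static void ExampleListProtocolMethods(Protocol *proto)
{
    unsigned int count = 0;
    struct objc_method_description *descs =
        protocol_copyMethodDescriptionList(proto, YES/*required*/,
                                           YES/*instance*/, &count);
    for (unsigned int i = 0; i < count; i++) {
        printf("%s %s\n", sel_getName(descs[i].name), descs[i].types);
    }
    free(descs);
}
#endif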
4421
4422 /***********************************************************************
4423 * protocol_getProperty
4424 * Returns a protocol's property with the given name (required properties only).
4425 * Locking: runtimeLock must be held by the caller
4426 **********************************************************************/
4427 static property_t *
4428 protocol_getProperty_nolock(protocol_t *proto, const char *name,
4429 bool isRequiredProperty, bool isInstanceProperty)
4430 {
4431 runtimeLock.assertLocked();
4432
4433 if (!isRequiredProperty) {
4434 // Only required properties are currently supported.
4435 return nil;
4436 }
4437
4438 property_list_t *plist = isInstanceProperty ?
4439 proto->instanceProperties : proto->classProperties();
4440 if (plist) {
4441 for (auto& prop : *plist) {
4442 if (0 == strcmp(name, prop.name)) {
4443 return &prop;
4444 }
4445 }
4446 }
4447
4448 if (proto->protocols) {
4449 uintptr_t i;
4450 for (i = 0; i < proto->protocols->count; i++) {
4451 protocol_t *p = remapProtocol(proto->protocols->list[i]);
4452 property_t *prop =
4453 protocol_getProperty_nolock(p, name,
4454 isRequiredProperty,
4455 isInstanceProperty);
4456 if (prop) return prop;
4457 }
4458 }
4459
4460 return nil;
4461 }
4462
4463 objc_property_t protocol_getProperty(Protocol *p, const char *name,
4464 BOOL isRequiredProperty, BOOL isInstanceProperty)
4465 {
4466 if (!p || !name) return nil;
4467
4468 mutex_locker_t lock(runtimeLock);
4469 return (objc_property_t)
4470 protocol_getProperty_nolock(newprotocol(p), name,
4471 isRequiredProperty, isInstanceProperty);
4472 }
4473
4474
4475 /***********************************************************************
4476 * protocol_copyPropertyList
4477 * protocol_copyPropertyList2
4478 * Copies a protocol's property list.
4479 * Locking: acquires runtimeLock
4480 **********************************************************************/
4481 static property_t **
4482 copyPropertyList(property_list_t *plist, unsigned int *outCount)
4483 {
4484 property_t **result = nil;
4485 unsigned int count = 0;
4486
4487 if (plist) {
4488 count = plist->count;
4489 }
4490
4491 if (count > 0) {
4492 result = (property_t **)malloc((count+1) * sizeof(property_t *));
4493
4494 count = 0;
4495 for (auto& prop : *plist) {
4496 result[count++] = &prop;
4497 }
4498 result[count] = nil;
4499 }
4500
4501 if (outCount) *outCount = count;
4502 return result;
4503 }
4504
4505 objc_property_t *
4506 protocol_copyPropertyList2(Protocol *proto, unsigned int *outCount,
4507 BOOL isRequiredProperty, BOOL isInstanceProperty)
4508 {
4509 if (!proto || !isRequiredProperty) {
4510 // Optional properties are not currently supported.
4511 if (outCount) *outCount = 0;
4512 return nil;
4513 }
4514
4515 mutex_locker_t lock(runtimeLock);
4516
4517 property_list_t *plist = isInstanceProperty
4518 ? newprotocol(proto)->instanceProperties
4519 : newprotocol(proto)->classProperties();
4520 return (objc_property_t *)copyPropertyList(plist, outCount);
4521 }
4522
4523 objc_property_t *
4524 protocol_copyPropertyList(Protocol *proto, unsigned int *outCount)
4525 {
4526 return protocol_copyPropertyList2(proto, outCount,
4527 YES/*required*/, YES/*instance*/);
4528 }
4529
4530
4531 /***********************************************************************
4532 * protocol_copyProtocolList
4533 * Copies this protocol's incorporated protocols.
4534 * Does not copy those protocols' incorporated protocols in turn.
4535 * Locking: acquires runtimeLock
4536 **********************************************************************/
4537 Protocol * __unsafe_unretained *
4538 protocol_copyProtocolList(Protocol *p, unsigned int *outCount)
4539 {
4540 unsigned int count = 0;
4541 Protocol **result = nil;
4542 protocol_t *proto = newprotocol(p);
4543
4544 if (!proto) {
4545 if (outCount) *outCount = 0;
4546 return nil;
4547 }
4548
4549 mutex_locker_t lock(runtimeLock);
4550
4551 if (proto->protocols) {
4552 count = (unsigned int)proto->protocols->count;
4553 }
4554 if (count > 0) {
4555 result = (Protocol **)malloc((count+1) * sizeof(Protocol *));
4556
4557 unsigned int i;
4558 for (i = 0; i < count; i++) {
4559 result[i] = (Protocol *)remapProtocol(proto->protocols->list[i]);
4560 }
4561 result[i] = nil;
4562 }
4563
4564 if (outCount) *outCount = count;
4565 return result;
4566 }
4567
4568
4569 /***********************************************************************
4570 * objc_allocateProtocol
4571 * Creates a new protocol. The protocol may not be used until
4572 * objc_registerProtocol() is called.
4573 * Returns nil if a protocol with the same name already exists.
4574 * Locking: acquires runtimeLock
4575 **********************************************************************/
4576 Protocol *
4577 objc_allocateProtocol(const char *name)
4578 {
4579 mutex_locker_t lock(runtimeLock);
4580
4581 if (getProtocol(name)) {
4582 return nil;
4583 }
4584
4585 protocol_t *result = (protocol_t *)calloc(sizeof(protocol_t), 1);
4586
4587 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4588 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4589 result->initProtocolIsa(cls);
4590 result->size = sizeof(protocol_t);
4591 // fixme mangle the name if it looks swift-y?
4592 result->mangledName = strdupIfMutable(name);
4593
4594 // fixme reserve name without installing
4595
4596 return (Protocol *)result;
4597 }
4598
4599
4600 /***********************************************************************
4601 * objc_registerProtocol
4602 * Registers a newly-constructed protocol. The protocol is now
4603 * ready for use and immutable.
4604 * Locking: acquires runtimeLock
4605 **********************************************************************/
4606 void objc_registerProtocol(Protocol *proto_gen)
4607 {
4608 protocol_t *proto = newprotocol(proto_gen);
4609
4610 mutex_locker_t lock(runtimeLock);
4611
4612 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4613 Class oldcls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4614 extern objc_class OBJC_CLASS_$_Protocol;
4615 Class cls = (Class)&OBJC_CLASS_$_Protocol;
4616
4617 if (proto->ISA() == cls) {
4618 _objc_inform("objc_registerProtocol: protocol '%s' was already "
4619 "registered!", proto->nameForLogging());
4620 return;
4621 }
4622 if (proto->ISA() != oldcls) {
4623 _objc_inform("objc_registerProtocol: protocol '%s' was not allocated "
4624 "with objc_allocateProtocol!", proto->nameForLogging());
4625 return;
4626 }
4627
4628 // NOT initProtocolIsa(). The protocol object may already
4629 // have been retained and we must preserve that count.
4630 proto->changeIsa(cls);
4631
4632 // Don't add this protocol if we already have it.
4633 // Should we warn on duplicates?
4634 if (getProtocol(proto->mangledName) == nil) {
4635 NXMapKeyCopyingInsert(protocols(), proto->mangledName, proto);
4636 }
4637 }
4638
4639
4640 /***********************************************************************
4641 * protocol_addProtocol
4642 * Adds an incorporated protocol to another protocol.
4643 * No method enforcement is performed.
4644 * `proto` must be under construction. `addition` must not.
4645 * Locking: acquires runtimeLock
4646 **********************************************************************/
4647 void
4648 protocol_addProtocol(Protocol *proto_gen, Protocol *addition_gen)
4649 {
4650 protocol_t *proto = newprotocol(proto_gen);
4651 protocol_t *addition = newprotocol(addition_gen);
4652
4653 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4654 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4655
4656 if (!proto_gen) return;
4657 if (!addition_gen) return;
4658
4659 mutex_locker_t lock(runtimeLock);
4660
4661 if (proto->ISA() != cls) {
4662 _objc_inform("protocol_addProtocol: modified protocol '%s' is not "
4663 "under construction!", proto->nameForLogging());
4664 return;
4665 }
4666 if (addition->ISA() == cls) {
4667 _objc_inform("protocol_addProtocol: added protocol '%s' is still "
4668 "under construction!", addition->nameForLogging());
4669 return;
4670 }
4671
4672 protocol_list_t *protolist = proto->protocols;
4673 if (!protolist) {
4674 protolist = (protocol_list_t *)
4675 calloc(1, sizeof(protocol_list_t)
4676 + sizeof(protolist->list[0]));
4677 } else {
4678 protolist = (protocol_list_t *)
4679 realloc(protolist, protocol_list_size(protolist)
4680 + sizeof(protolist->list[0]));
4681 }
4682
4683 protolist->list[protolist->count++] = (protocol_ref_t)addition;
4684 proto->protocols = protolist;
4685 }
4686
4687
4688 /***********************************************************************
4689 * protocol_addMethodDescription
4690 * Adds a method to a protocol. The protocol must be under construction.
4691 * Locking: acquires runtimeLock
4692 **********************************************************************/
4693 static void
4694 protocol_addMethod_nolock(method_list_t*& list, SEL name, const char *types)
4695 {
4696 if (!list) {
4697 list = (method_list_t *)calloc(sizeof(method_list_t), 1);
4698 list->entsizeAndFlags = sizeof(list->first);
4699 list->setFixedUp();
4700 } else {
4701 size_t size = list->byteSize() + list->entsize();
4702 list = (method_list_t *)realloc(list, size);
4703 }
4704
4705 method_t& meth = list->get(list->count++);
4706 meth.name = name;
4707 meth.types = types ? strdupIfMutable(types) : "";
4708 meth.imp = nil;
4709 }
4710
4711 void
4712 protocol_addMethodDescription(Protocol *proto_gen, SEL name, const char *types,
4713 BOOL isRequiredMethod, BOOL isInstanceMethod)
4714 {
4715 protocol_t *proto = newprotocol(proto_gen);
4716
4717 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4718 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4719
4720 if (!proto_gen) return;
4721
4722 mutex_locker_t lock(runtimeLock);
4723
4724 if (proto->ISA() != cls) {
4725 _objc_inform("protocol_addMethodDescription: protocol '%s' is not "
4726 "under construction!", proto->nameForLogging());
4727 return;
4728 }
4729
4730 if (isRequiredMethod && isInstanceMethod) {
4731 protocol_addMethod_nolock(proto->instanceMethods, name, types);
4732 } else if (isRequiredMethod && !isInstanceMethod) {
4733 protocol_addMethod_nolock(proto->classMethods, name, types);
4734 } else if (!isRequiredMethod && isInstanceMethod) {
4735 protocol_addMethod_nolock(proto->optionalInstanceMethods, name,types);
4736 } else /* !isRequiredMethod && !isInstanceMethod) */ {
4737 protocol_addMethod_nolock(proto->optionalClassMethods, name, types);
4738 }
4739 }
4740
4741
4742 /***********************************************************************
4743 * protocol_addProperty
4744 * Adds a property to a protocol. The protocol must be under construction.
4745 * Locking: acquires runtimeLock
4746 **********************************************************************/
4747 static void
4748 protocol_addProperty_nolock(property_list_t *&plist, const char *name,
4749 const objc_property_attribute_t *attrs,
4750 unsigned int count)
4751 {
4752 if (!plist) {
4753 plist = (property_list_t *)calloc(sizeof(property_list_t), 1);
4754 plist->entsizeAndFlags = sizeof(property_t);
4755 } else {
4756 plist = (property_list_t *)
4757 realloc(plist, sizeof(property_list_t)
4758 + plist->count * plist->entsize());
4759 }
4760
4761 property_t& prop = plist->get(plist->count++);
4762 prop.name = strdupIfMutable(name);
4763 prop.attributes = copyPropertyAttributeString(attrs, count);
4764 }
4765
4766 void
4767 protocol_addProperty(Protocol *proto_gen, const char *name,
4768 const objc_property_attribute_t *attrs,
4769 unsigned int count,
4770 BOOL isRequiredProperty, BOOL isInstanceProperty)
4771 {
4772 protocol_t *proto = newprotocol(proto_gen);
4773
4774 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4775 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4776
4777 if (!proto) return;
4778 if (!name) return;
4779
4780 mutex_locker_t lock(runtimeLock);
4781
4782 if (proto->ISA() != cls) {
4783 _objc_inform("protocol_addProperty: protocol '%s' is not "
4784 "under construction!", proto->nameForLogging());
4785 return;
4786 }
4787
4788 if (isRequiredProperty && isInstanceProperty) {
4789 protocol_addProperty_nolock(proto->instanceProperties, name, attrs, count);
4790 }
4791 else if (isRequiredProperty && !isInstanceProperty) {
4792 protocol_addProperty_nolock(proto->_classProperties, name, attrs, count);
4793 }
4794 //else if (!isRequiredProperty && isInstanceProperty) {
4795 // protocol_addProperty_nolock(proto->optionalInstanceProperties, name, attrs, count);
4796 //}
4797 //else /* !isRequiredProperty && !isInstanceProperty) */ {
4798 // protocol_addProperty_nolock(proto->optionalClassProperties, name, attrs, count);
4799 //}
4800 }
4801
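// Usage sketch (illustrative; the protocol, selector, property, and type
// encoding below are hypothetical): the full construction sequence using the
// public API — allocate, add members while under construction, then register.
#if 0
static void ExampleBuildProtocol(void)
{
    Protocol *proto = objc_allocateProtocol("MyDrawable");
    if (!proto) return;  // a protocol with this name already exists

    // Required instance method (the type encoding here is illustrative).
    protocol_addMethodDescription(proto, sel_registerName("drawInRect:"),
                                  "v@:{CGRect={CGPoint=dd}{CGSize=dd}}",
                                  YES/*required*/, YES/*instance*/);

    // Required, readonly BOOL instance property named "opaque".
    objc_property_attribute_t attrs[] = { {"T", "B"}, {"R", ""} };
    protocol_addProperty(proto, "opaque", attrs, 2,
                         YES/*required*/, YES/*instance*/);

    // After registration the protocol is immutable and visible to
    // objc_getProtocol() and to conformance checks.
    objc_registerProtocol(proto);
}
#endif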
4802 static int
4803 objc_getRealizedClassList_nolock(Class *buffer, int bufferLen)
4804 {
4805 int count = 0;
4806
4807 if (buffer) {
4808 int c = 0;
4809 foreach_realized_class([=, &count, &c](Class cls) {
4810 count++;
4811 if (c < bufferLen) {
4812 buffer[c++] = cls;
4813 }
4814 return true;
4815 });
4816 } else {
4817 foreach_realized_class([&count](Class cls) {
4818 count++;
4819 return true;
4820 });
4821 }
4822
4823 return count;
4824 }
4825
4826 static Class *
4827 objc_copyRealizedClassList_nolock(unsigned int *outCount)
4828 {
4829 Class *result = nil;
4830 unsigned int count = 0;
4831
4832 foreach_realized_class([&count](Class cls) {
4833 count++;
4834 return true;
4835 });
4836
4837 if (count > 0) {
4838 unsigned int c = 0;
4839
4840 result = (Class *)malloc((1+count) * sizeof(Class));
4841 foreach_realized_class([=, &c](Class cls) {
4842 result[c++] = cls;
4843 return true;
4844 });
4845 result[c] = nil;
4846 }
4847
4848 if (outCount) *outCount = count;
4849 return result;
4850 }
4851
4852 static void
4853 class_getImpCache_nolock(Class cls, cache_t &cache, objc_imp_cache_entry *buffer, int len)
4854 {
4855 bucket_t *buckets = cache.buckets();
4856
4857 uintptr_t count = cache.capacity();
4858 uintptr_t index;
4859 int wpos = 0;
4860
4861 for (index = 0; index < count && wpos < len; index += 1) {
4862 if (buckets[index].sel()) {
4863 buffer[wpos].imp = buckets[index].imp(cls);
4864 buffer[wpos].sel = buckets[index].sel();
4865 wpos++;
4866 }
4867 }
4868 }
4869
4870 /***********************************************************************
4871 * objc_getClassList
4872 * Copies pointers to all realized classes into buffer and returns the total count.
4873 * This requires all classes be realized, which is regrettably non-lazy.
4874 * Locking: acquires runtimeLock
4875 **********************************************************************/
4876 int
4877 objc_getClassList(Class *buffer, int bufferLen)
4878 {
4879 mutex_locker_t lock(runtimeLock);
4880
4881 realizeAllClasses();
4882
4883 return objc_getRealizedClassList_nolock(buffer, bufferLen);
4884 }
4885
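// Usage sketch (illustrative): the usual two-call pattern — ask for the count
// with a nil buffer, allocate, then fill the buffer.
#if 0
static void ExampleClassList(void)
{
    int total = objc_getClassList(nil, 0);
    if (total <= 0) return;

    Class *buffer = (Class *)malloc(total * sizeof(Class));
    int returned = objc_getClassList(buffer, total);
    int n = returned < total ? returned : total;  // clamp in case the class table grew
    for (int i = 0; i < n; i++) {
        printf("%s\n", class_getName(buffer[i]));
    }
    free(buffer);
}
#endif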
4886 /***********************************************************************
4887 * objc_copyRealizedClassList
4888 * Returns pointers to all realized classes.
4889 *
4890 * outCount may be nil. *outCount is the number of classes returned.
4891 * If the returned array is not nil, it is nil-terminated and must be
4892 * freed with free().
4893 * Locking: write-locks runtimeLock
4894 **********************************************************************/
4895 Class *
4896 objc_copyRealizedClassList(unsigned int *outCount)
4897 {
4898 mutex_locker_t lock(runtimeLock);
4899
4900 return objc_copyRealizedClassList_nolock(outCount);
4901 }
4902
4903
4904 /***********************************************************************
4905 * objc_copyClassList
4906 * Returns pointers to all classes.
4907 * This requires all classes be realized, which is regrettably non-lazy.
4908 *
4909 * outCount may be nil. *outCount is the number of classes returned.
4910 * If the returned array is not nil, it is nil-terminated and must be
4911 * freed with free().
4912 * Locking: write-locks runtimeLock
4913 **********************************************************************/
4914 Class *
4915 objc_copyClassList(unsigned int *outCount)
4916 {
4917 mutex_locker_t lock(runtimeLock);
4918
4919 realizeAllClasses();
4920
4921 return objc_copyRealizedClassList_nolock(outCount);
4922 }
4923
4924 /***********************************************************************
4925 * class_copyImpCache
4926 * Returns the current content of the Class IMP Cache
4927 *
4928 * outCount may be nil. *outCount is the number of entries returned.
4929 * If the returned array is not nil, it is nil-terminated and must be
4930 * freed with free().
4931 * Locking: write-locks cacheUpdateLock
4932 **********************************************************************/
4933 objc_imp_cache_entry *
4934 class_copyImpCache(Class cls, int *outCount)
4935 {
4936 objc_imp_cache_entry *buffer = nullptr;
4937
4938 #if CONFIG_USE_CACHE_LOCK
4939 mutex_locker_t lock(cacheUpdateLock);
4940 #else
4941 mutex_locker_t lock(runtimeLock);
4942 #endif
4943
4944 cache_t &cache = cls->cache;
4945 int count = (int)cache.occupied();
4946
4947 if (count) {
4948 buffer = (objc_imp_cache_entry *)calloc(1+count, sizeof(objc_imp_cache_entry));
4949 class_getImpCache_nolock(cls, cache, buffer, count);
4950 }
4951
4952 if (outCount) *outCount = count;
4953 return buffer;
4954 }
4955
4956
4957 /***********************************************************************
4958 * objc_copyProtocolList
4959 * Returns pointers to all protocols.
4960 * Locking: read-locks runtimeLock
4961 **********************************************************************/
4962 Protocol * __unsafe_unretained *
4963 objc_copyProtocolList(unsigned int *outCount)
4964 {
4965 mutex_locker_t lock(runtimeLock);
4966
4967 NXMapTable *protocol_map = protocols();
4968
4969 // Find all the protocols from the pre-optimized images. These protocols
4970 // won't be in the protocol map.
4971 objc::DenseMap<const char*, Protocol*> preoptimizedProtocols;
4972 if (sharedCacheSupportsProtocolRoots()) {
4973 header_info *hi;
4974 for (hi = FirstHeader; hi; hi = hi->getNext()) {
4975 if (!hi->hasPreoptimizedProtocols())
4976 continue;
4977
4978 size_t count, i;
4979 const protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count);
4980 for (i = 0; i < count; i++) {
4981 const protocol_t* protocol = protolist[i];
4982
4983 // Skip protocols we already have in the runtime map. These likely
4984 // correspond to protocols added dynamically which have the same
4985 // name as a protocol found later in a dlopen'ed shared cache image.
4986 if (NXMapGet(protocol_map, protocol->mangledName) != nil)
4987 continue;
4988
4989 // The protocols in the shared cache protolist point to their
4990 // original on-disk object, not the optimized one. We can use the name
4991 // to find the optimized one.
4992 Protocol* optimizedProto = getPreoptimizedProtocol(protocol->mangledName);
4993 preoptimizedProtocols.insert({ protocol->mangledName, optimizedProto });
4994 }
4995 }
4996 }
4997
4998 unsigned int count = NXCountMapTable(protocol_map) + (unsigned int)preoptimizedProtocols.size();
4999 if (count == 0) {
5000 if (outCount) *outCount = 0;
5001 return nil;
5002 }
5003
5004 Protocol **result = (Protocol **)malloc((count+1) * sizeof(Protocol*));
5005
5006 unsigned int i = 0;
5007 Protocol *proto;
5008 const char *name;
5009 NXMapState state = NXInitMapState(protocol_map);
5010 while (NXNextMapState(protocol_map, &state,
5011 (const void **)&name, (const void **)&proto))
5012 {
5013 result[i++] = proto;
5014 }
5015
5016 // Add any protocols found in the pre-optimized table
5017 for (auto it : preoptimizedProtocols) {
5018 result[i++] = it.second;
5019 }
5020
5021 result[i++] = nil;
5022 ASSERT(i == count+1);
5023
5024 if (outCount) *outCount = count;
5025 return result;
5026 }
5027
5028
5029 /***********************************************************************
5030 * objc_getProtocol
5031 * Get a protocol by name, or return nil
5032 * Locking: read-locks runtimeLock
5033 **********************************************************************/
5034 Protocol *objc_getProtocol(const char *name)
5035 {
5036 mutex_locker_t lock(runtimeLock);
5037 return getProtocol(name);
5038 }
5039
5040
5041 /***********************************************************************
5042 * class_copyMethodList
5043 * Copies the class's own method list (superclass methods are not included).
5044 * Locking: read-locks runtimeLock
5045 **********************************************************************/
5046 Method *
5047 class_copyMethodList(Class cls, unsigned int *outCount)
5048 {
5049 unsigned int count = 0;
5050 Method *result = nil;
5051
5052 if (!cls) {
5053 if (outCount) *outCount = 0;
5054 return nil;
5055 }
5056
5057 mutex_locker_t lock(runtimeLock);
5058
5059 ASSERT(cls->isRealized());
5060
5061 count = cls->data()->methods.count();
5062
5063 if (count > 0) {
5064 result = (Method *)malloc((count + 1) * sizeof(Method));
5065
5066 count = 0;
5067 for (auto& meth : cls->data()->methods) {
5068 result[count++] = &meth;
5069 }
5070 result[count] = nil;
5071 }
5072
5073 if (outCount) *outCount = count;
5074 return result;
5075 }
5076
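// Usage sketch (illustrative): listing the methods a class implements itself
// (inherited methods are not included). Pass the metaclass to list class methods.
#if 0
static void ExampleDumpMethods(Class cls)
{
    unsigned int count = 0;
    Method *methods = class_copyMethodList(cls, &count);
    for (unsigned int i = 0; i < count; i++) {
        printf("%s %s\n",
               sel_getName(method_getName(methods[i])),
               method_getTypeEncoding(methods[i]));
    }
    free(methods);  // caller owns the returned buffer
}
#endif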
5077
5078 /***********************************************************************
5079 * class_copyIvarList
5080 * Copies the class's ivar list.
5081 * Locking: read-locks runtimeLock
5082 **********************************************************************/
5083 Ivar *
5084 class_copyIvarList(Class cls, unsigned int *outCount)
5085 {
5086 const ivar_list_t *ivars;
5087 Ivar *result = nil;
5088 unsigned int count = 0;
5089
5090 if (!cls) {
5091 if (outCount) *outCount = 0;
5092 return nil;
5093 }
5094
5095 mutex_locker_t lock(runtimeLock);
5096
5097 ASSERT(cls->isRealized());
5098
5099 if ((ivars = cls->data()->ro->ivars) && ivars->count) {
5100 result = (Ivar *)malloc((ivars->count+1) * sizeof(Ivar));
5101
5102 for (auto& ivar : *ivars) {
5103 if (!ivar.offset) continue; // anonymous bitfield
5104 result[count++] = &ivar;
5105 }
5106 result[count] = nil;
5107 }
5108
5109 if (outCount) *outCount = count;
5110 return result;
5111 }
5112
5113
5114 /***********************************************************************
5115 * class_copyPropertyList. Returns a heap block containing the
5116 * properties declared in the class, or nil if the class
5117 * declares no properties. Caller must free the block.
5118 * Does not copy any superclass's properties.
5119 * Locking: read-locks runtimeLock
5120 **********************************************************************/
5121 objc_property_t *
5122 class_copyPropertyList(Class cls, unsigned int *outCount)
5123 {
5124 if (!cls) {
5125 if (outCount) *outCount = 0;
5126 return nil;
5127 }
5128
5129 mutex_locker_t lock(runtimeLock);
5130
5131 checkIsKnownClass(cls);
5132 ASSERT(cls->isRealized());
5133
5134 auto rw = cls->data();
5135
5136 property_t **result = nil;
5137 unsigned int count = rw->properties.count();
5138 if (count > 0) {
5139 result = (property_t **)malloc((count + 1) * sizeof(property_t *));
5140
5141 count = 0;
5142 for (auto& prop : rw->properties) {
5143 result[count++] = &prop;
5144 }
5145 result[count] = nil;
5146 }
5147
5148 if (outCount) *outCount = count;
5149 return (objc_property_t *)result;
5150 }
5151
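// Usage sketch (illustrative): printing each property a class declares along
// with its raw attribute string.
#if 0
static void ExampleDumpProperties(Class cls)
{
    unsigned int count = 0;
    objc_property_t *props = class_copyPropertyList(cls, &count);
    for (unsigned int i = 0; i < count; i++) {
        printf("%s : %s\n",
               property_getName(props[i]),
               property_getAttributes(props[i]));
    }
    free(props);  // caller owns the returned buffer
}
#endif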
5152
5153 /***********************************************************************
5154 * objc_class::getLoadMethod
5155 * Returns the class's own +load implementation, if any.
5156 * Called only from add_class_to_loadable_list.
5157 * Locking: runtimeLock must be read- or write-locked by the caller.
5158 **********************************************************************/
5159 IMP
5160 objc_class::getLoadMethod()
5161 {
5162 runtimeLock.assertLocked();
5163
5164 const method_list_t *mlist;
5165
5166 ASSERT(isRealized());
5167 ASSERT(ISA()->isRealized());
5168 ASSERT(!isMetaClass());
5169 ASSERT(ISA()->isMetaClass());
5170
5171 mlist = ISA()->data()->ro->baseMethods();
5172 if (mlist) {
5173 for (const auto& meth : *mlist) {
5174 const char *name = sel_cname(meth.name);
5175 if (0 == strcmp(name, "load")) {
5176 return meth.imp;
5177 }
5178 }
5179 }
5180
5181 return nil;
5182 }
5183
5184
5185 /***********************************************************************
5186 * _category_getName
5187 * Returns a category's name.
5188 * Locking: none
5189 **********************************************************************/
5190 const char *
5191 _category_getName(Category cat)
5192 {
5193 return cat->name;
5194 }
5195
5196
5197 /***********************************************************************
5198 * _category_getClassName
5199 * Returns the name of a category's class.
5200 * Called only from add_category_to_loadable_list and
5201 * remove_category_from_loadable_list for logging purposes.
5202 * Locking: runtimeLock must be read- or write-locked by the caller
5203 **********************************************************************/
5204 const char *
5205 _category_getClassName(Category cat)
5206 {
5207 runtimeLock.assertLocked();
5208 return remapClass(cat->cls)->nameForLogging();
5209 }
5210
5211
5212 /***********************************************************************
5213 * _category_getClass
5214 * Returns a category's class
5215 * Called only by call_category_loads.
5216 * Locking: read-locks runtimeLock
5217 **********************************************************************/
5218 Class
5219 _category_getClass(Category cat)
5220 {
5221 mutex_locker_t lock(runtimeLock);
5222 Class result = remapClass(cat->cls);
5223 ASSERT(result->isRealized()); // ok for call_category_loads' usage
5224 return result;
5225 }
5226
5227
5228 /***********************************************************************
5229 * _category_getLoadMethod
5230 * fixme
5231 * Called only from add_category_to_loadable_list
5232 * Locking: runtimeLock must be read- or write-locked by the caller
5233 **********************************************************************/
5234 IMP
5235 _category_getLoadMethod(Category cat)
5236 {
5237 runtimeLock.assertLocked();
5238
5239 const method_list_t *mlist;
5240
5241 mlist = cat->classMethods;
5242 if (mlist) {
5243 for (const auto& meth : *mlist) {
5244 const char *name = sel_cname(meth.name);
5245 if (0 == strcmp(name, "load")) {
5246 return meth.imp;
5247 }
5248 }
5249 }
5250
5251 return nil;
5252 }
5253
5254
5255 /***********************************************************************
5256 * category_t::propertiesForMeta
5257 * Return a category's instance or class properties.
5258 * hi is the image containing the category.
5259 **********************************************************************/
5260 property_list_t *
5261 category_t::propertiesForMeta(bool isMeta, struct header_info *hi)
5262 {
5263 if (!isMeta) return instanceProperties;
5264 else if (hi->info()->hasCategoryClassProperties()) return _classProperties;
5265 else return nil;
5266 }
5267
5268
5269 /***********************************************************************
5270 * class_copyProtocolList
5271 * fixme
5272 * Locking: read-locks runtimeLock
5273 **********************************************************************/
5274 Protocol * __unsafe_unretained *
5275 class_copyProtocolList(Class cls, unsigned int *outCount)
5276 {
5277 unsigned int count = 0;
5278 Protocol **result = nil;
5279
5280 if (!cls) {
5281 if (outCount) *outCount = 0;
5282 return nil;
5283 }
5284
5285 mutex_locker_t lock(runtimeLock);
5286
5287 checkIsKnownClass(cls);
5288
5289 ASSERT(cls->isRealized());
5290
5291 count = cls->data()->protocols.count();
5292
5293 if (count > 0) {
5294 result = (Protocol **)malloc((count+1) * sizeof(Protocol *));
5295
5296 count = 0;
5297 for (const auto& proto : cls->data()->protocols) {
5298 result[count++] = (Protocol *)remapProtocol(proto);
5299 }
5300 result[count] = nil;
5301 }
5302
5303 if (outCount) *outCount = count;
5304 return result;
5305 }
5306
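/*
 * Usage sketch (illustrative, not part of the runtime): copying the
 * protocols a class adopts directly; protocols adopted by superclasses
 * are not included.
 *
 *   unsigned int protoCount = 0;
 *   Protocol * __unsafe_unretained *protos =
 *       class_copyProtocolList(objc_getClass("NSString"), &protoCount);
 *   for (unsigned int i = 0; i < protoCount; i++) {
 *       printf("%s\n", protocol_getName(protos[i]));
 *   }
 *   free(protos);
 */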
5307
5308 /***********************************************************************
5309 * objc_copyImageNames
5310 * Copies names of loaded images with ObjC contents.
5311 *
5312 * Locking: acquires runtimeLock
5313 **********************************************************************/
5314 const char **objc_copyImageNames(unsigned int *outCount)
5315 {
5316 mutex_locker_t lock(runtimeLock);
5317
5318 int HeaderCount = 0;
5319 for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5320 HeaderCount++;
5321 }
5322
5323 #if TARGET_OS_WIN32
5324 const TCHAR **names = (const TCHAR **)
5325 malloc((HeaderCount+1) * sizeof(TCHAR *));
5326 #else
5327 const char **names = (const char **)
5328 malloc((HeaderCount+1) * sizeof(char *));
5329 #endif
5330
5331 unsigned int count = 0;
5332 for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5333 #if TARGET_OS_WIN32
5334 if (hi->moduleName) {
5335 names[count++] = hi->moduleName;
5336 }
5337 #else
5338 const char *fname = hi->fname();
5339 if (fname) {
5340 names[count++] = fname;
5341 }
5342 #endif
5343 }
5344 names[count] = nil;
5345
5346 if (count == 0) {
5347 // Return nil instead of empty list if there are no images
5348 free((void *)names);
5349 names = nil;
5350 }
5351
5352 if (outCount) *outCount = count;
5353 return names;
5354 }
5355
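/*
 * Usage sketch (illustrative, not part of the runtime): printing every
 * loaded image with ObjC contents. Only the array is freed; the strings
 * are owned by the runtime.
 *
 *   unsigned int imageCount = 0;
 *   const char **images = objc_copyImageNames(&imageCount);
 *   for (unsigned int i = 0; i < imageCount; i++) {
 *       printf("%s\n", images[i]);
 *   }
 *   free(images);
 */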
5356
5357 /***********************************************************************
5358 * copyClassNamesForImage_nolock
5359 * Copies class names from the given image.
5360 * Missing weak-import classes are omitted.
5361 * Swift class names are demangled.
5362 *
5363 * Locking: runtimeLock must be held by the caller
5364 **********************************************************************/
5365 const char **
5366 copyClassNamesForImage_nolock(header_info *hi, unsigned int *outCount)
5367 {
5368 runtimeLock.assertLocked();
5369 ASSERT(hi);
5370
5371 size_t count;
5372 classref_t const *classlist = _getObjc2ClassList(hi, &count);
5373 const char **names = (const char **)
5374 malloc((count+1) * sizeof(const char *));
5375
5376 size_t shift = 0;
5377 for (size_t i = 0; i < count; i++) {
5378 Class cls = remapClass(classlist[i]);
5379 if (cls) {
5380 names[i-shift] = cls->demangledName();
5381 } else {
5382 shift++; // ignored weak-linked class
5383 }
5384 }
5385 count -= shift;
5386 names[count] = nil;
5387
5388 if (outCount) *outCount = (unsigned int)count;
5389 return names;
5390 }
5391
5392
5393
5394 /***********************************************************************
5395 * objc_copyClassNamesForImage
5396 * Copies class names from the named image.
5397 * The image name must be identical to dladdr's dli_fname value.
5398 * Missing weak-import classes are omitted.
5399 * Swift class names are demangled.
5400 *
5401 * Locking: acquires runtimeLock
5402 **********************************************************************/
5403 const char **
5404 objc_copyClassNamesForImage(const char *image, unsigned int *outCount)
5405 {
5406 if (!image) {
5407 if (outCount) *outCount = 0;
5408 return nil;
5409 }
5410
5411 mutex_locker_t lock(runtimeLock);
5412
5413 // Find the image.
5414 header_info *hi;
5415 for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5416 #if TARGET_OS_WIN32
5417 if (0 == wcscmp((TCHAR *)image, hi->moduleName)) break;
5418 #else
5419 if (0 == strcmp(image, hi->fname())) break;
5420 #endif
5421 }
5422
5423 if (!hi) {
5424 if (outCount) *outCount = 0;
5425 return nil;
5426 }
5427
5428 return copyClassNamesForImage_nolock(hi, outCount);
5429 }
5430
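/*
 * Usage sketch (illustrative, not part of the runtime): listing the
 * classes defined in the image that provides a known class.
 * class_getImageName should return the same dladdr-style path that this
 * function compares against.
 *
 *   const char *image = class_getImageName(objc_getClass("NSObject"));
 *   unsigned int classCount = 0;
 *   const char **classNames = objc_copyClassNamesForImage(image, &classCount);
 *   for (unsigned int i = 0; i < classCount; i++) {
 *       printf("%s\n", classNames[i]);
 *   }
 *   free(classNames);
 */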
5431
5432 /***********************************************************************
5433 * objc_copyClassNamesForImageHeader
5434 * Copies class names from the given image.
5435 * Missing weak-import classes are omitted.
5436 * Swift class names are demangled.
5437 *
5438 * Locking: acquires runtimeLock
5439 **********************************************************************/
5440 const char **
5441 objc_copyClassNamesForImageHeader(const struct mach_header *mh, unsigned int *outCount)
5442 {
5443 if (!mh) {
5444 if (outCount) *outCount = 0;
5445 return nil;
5446 }
5447
5448 mutex_locker_t lock(runtimeLock);
5449
5450 // Find the image.
5451 header_info *hi;
5452 for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5453 if (hi->mhdr() == (const headerType *)mh) break;
5454 }
5455
5456 if (!hi) {
5457 if (outCount) *outCount = 0;
5458 return nil;
5459 }
5460
5461 return copyClassNamesForImage_nolock(hi, outCount);
5462 }
5463
5464
5465 /***********************************************************************
5466 * saveTemporaryString
5467 * Save a string in a thread-local FIFO buffer.
5468 * This is suitable for temporary strings generated for logging purposes.
5469 **********************************************************************/
5470 static void
5471 saveTemporaryString(char *str)
5472 {
5473 // Fixed-size FIFO. We free the first string, shift
5474 // the rest, and add the new string to the end.
5475 _objc_pthread_data *data = _objc_fetch_pthread_data(true);
5476 if (data->printableNames[0]) {
5477 free(data->printableNames[0]);
5478 }
5479 int last = countof(data->printableNames) - 1;
5480 for (int i = 0; i < last; i++) {
5481 data->printableNames[i] = data->printableNames[i+1];
5482 }
5483 data->printableNames[last] = str;
5484 }
5485
5486
5487 /***********************************************************************
5488 * objc_class::nameForLogging
5489 * Returns the class's name, suitable for display.
5490 * The returned memory is TEMPORARY. Print it or copy it immediately.
5491 * Locking: none
5492 **********************************************************************/
5493 const char *
5494 objc_class::nameForLogging()
5495 {
5496 // Handle the easy case directly.
5497 if (isRealized() || isFuture()) {
5498 if (data()->demangledName) return data()->demangledName;
5499 }
5500
5501 char *result;
5502
5503 const char *name = mangledName();
5504 char *de = copySwiftV1DemangledName(name);
5505 if (de) result = de;
5506 else result = strdup(name);
5507
5508 saveTemporaryString(result);
5509 return result;
5510 }
5511
5512
5513 /***********************************************************************
5514 * objc_class::demangledName
5515 * Returns the class's name with any Swift mangling removed, caching it when that is safe.
5516 * Locking: runtimeLock may or may not be held by the caller.
5517 **********************************************************************/
5518 mutex_t DemangleCacheLock;
5519 static objc::DenseSet<const char *> *DemangleCache;
5520 const char *
5521 objc_class::demangledName()
5522 {
5523 // Return previously demangled name if available.
5524 if (isRealized() || isFuture()) {
5525 if (data()->demangledName) return data()->demangledName;
5526 }
5527
5528 // Try demangling the mangled name.
5529 const char *mangled = mangledName();
5530 char *de = copySwiftV1DemangledName(mangled);
5531 if (isRealized() || isFuture()) {
5532 // Class is already realized or future.
5533 // Save demangling result in rw data.
5534 // We may not own runtimeLock so use an atomic operation instead.
5535 if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangled),
5536 (void**)&data()->demangledName))
5537 {
5538 if (de) free(de);
5539 }
5540 return data()->demangledName;
5541 }
5542
5543 // Class is not yet realized.
5544 if (!de) {
5545 // Name is not mangled. Return it without caching.
5546 return mangled;
5547 }
5548
5549 // Class is not yet realized and name is mangled.
5550 // Allocate the name but don't save it in the class.
5551 // Save the name in a side cache instead to prevent leaks.
5552 // When the class is actually realized we may allocate a second
5553 // copy of the name, but we don't care.
5554 // (Previously we would try to realize the class now and save the
5555 // name there, but realization is more complicated for Swift classes.)
5556
5557 // Only objc_copyClassNamesForImage() should get here.
5558 // fixme lldb's calls to class_getName() can also get here when
5559 // interrogating the dyld shared cache. (rdar://27258517)
5560 // fixme runtimeLock.assertLocked();
5561 // fixme ASSERT(realize);
5562
5563 const char *cached;
5564 {
5565 mutex_locker_t lock(DemangleCacheLock);
5566 if (!DemangleCache) {
5567 DemangleCache = new objc::DenseSet<const char *>{};
5568 }
5569 cached = *DemangleCache->insert(de).first;
5570 }
5571 if (cached != de) free(de);
5572 return cached;
5573 }
5574
5575
5576 /***********************************************************************
5577 * class_getName
5578 * fixme
5579 * Locking: may acquire DemangleCacheLock
5580 **********************************************************************/
5581 const char *class_getName(Class cls)
5582 {
5583 if (!cls) return "nil";
5584 // fixme lldb calls class_getName() on unrealized classes (rdar://27258517)
5585 // ASSERT(cls->isRealized() || cls->isFuture());
5586 return cls->demangledName();
5587 }
5588
5589 /***********************************************************************
5590 * objc_debug_class_getNameRaw
5591 * fixme
5592 * Locking: none
5593 **********************************************************************/
5594 const char *objc_debug_class_getNameRaw(Class cls)
5595 {
5596 if (!cls) return "nil";
5597 return cls->mangledName();
5598 }
5599
5600
5601 /***********************************************************************
5602 * class_getVersion
5603 * fixme
5604 * Locking: none
5605 **********************************************************************/
5606 int
5607 class_getVersion(Class cls)
5608 {
5609 if (!cls) return 0;
5610 ASSERT(cls->isRealized());
5611 return cls->data()->version;
5612 }
5613
5614
5615 /***********************************************************************
5616 * class_setVersion
5617 * fixme
5618 * Locking: none
5619 **********************************************************************/
5620 void
5621 class_setVersion(Class cls, int version)
5622 {
5623 if (!cls) return;
5624 ASSERT(cls->isRealized());
5625 cls->data()->version = version;
5626 }
5627
5628 /***********************************************************************
5629 * search_method_list_inline
5630 **********************************************************************/
5631 ALWAYS_INLINE static method_t *
5632 findMethodInSortedMethodList(SEL key, const method_list_t *list)
5633 {
5634 ASSERT(list);
5635
5636 const method_t * const first = &list->first;
5637 const method_t *base = first;
5638 const method_t *probe;
5639 uintptr_t keyValue = (uintptr_t)key;
5640 uint32_t count;
5641
5642 for (count = list->count; count != 0; count >>= 1) {
5643 probe = base + (count >> 1);
5644
5645 uintptr_t probeValue = (uintptr_t)probe->name;
5646
5647 if (keyValue == probeValue) {
5648 // `probe` is a match.
5649 // Rewind looking for the *first* occurrence of this value.
5650 // This is required for correct category overrides.
5651 while (probe > first && keyValue == (uintptr_t)probe[-1].name) {
5652 probe--;
5653 }
5654 return (method_t *)probe;
5655 }
5656
5657 if (keyValue > probeValue) {
5658 base = probe + 1;
5659 count--;
5660 }
5661 }
5662
5663 return nil;
5664 }
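
/*
 * Worked example for findMethodInSortedMethodList above (illustrative):
 * a fixed-up list is sorted by SEL address, so duplicate selectors sit
 * next to each other. If the binary search probe lands on a later
 * duplicate, the rewind loop steps back so that the *first* entry for
 * that selector is returned -- the ordering that category attachment
 * relies on for overrides.
 */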
5665
5666 ALWAYS_INLINE static method_t *
5667 search_method_list_inline(const method_list_t *mlist, SEL sel)
5668 {
5669 int methodListIsFixedUp = mlist->isFixedUp();
5670 int methodListHasExpectedSize = mlist->entsize() == sizeof(method_t);
5671
5672 if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) {
5673 return findMethodInSortedMethodList(sel, mlist);
5674 } else {
5675 // Linear search of unsorted method list
5676 for (auto& meth : *mlist) {
5677 if (meth.name == sel) return &meth;
5678 }
5679 }
5680
5681 #if DEBUG
5682 // sanity-check negative results
5683 if (mlist->isFixedUp()) {
5684 for (auto& meth : *mlist) {
5685 if (meth.name == sel) {
5686 _objc_fatal("linear search worked when binary search did not");
5687 }
5688 }
5689 }
5690 #endif
5691
5692 return nil;
5693 }
5694
5695 NEVER_INLINE static method_t *
5696 search_method_list(const method_list_t *mlist, SEL sel)
5697 {
5698 return search_method_list_inline(mlist, sel);
5699 }
5700
5701 /***********************************************************************
5702 * method_lists_contains_any
5703 **********************************************************************/
5704 static NEVER_INLINE bool
5705 method_lists_contains_any(method_list_t **mlists, method_list_t **end,
5706 SEL sels[], size_t selcount)
5707 {
5708 while (mlists < end) {
5709 const method_list_t *mlist = *mlists++;
5710 int methodListIsFixedUp = mlist->isFixedUp();
5711 int methodListHasExpectedSize = mlist->entsize() == sizeof(method_t);
5712
5713 if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) {
5714 for (size_t i = 0; i < selcount; i++) {
5715 if (findMethodInSortedMethodList(sels[i], mlist)) {
5716 return true;
5717 }
5718 }
5719 } else {
5720 for (auto& meth : *mlist) {
5721 for (size_t i = 0; i < selcount; i++) {
5722 if (meth.name == sels[i]) {
5723 return true;
5724 }
5725 }
5726 }
5727 }
5728 }
5729 return false;
5730 }
5731
5732 /***********************************************************************
5733 * getMethodNoSuper_nolock
5734 * fixme
5735 * Locking: runtimeLock must be read- or write-locked by the caller
5736 **********************************************************************/
5737 static method_t *
5738 getMethodNoSuper_nolock(Class cls, SEL sel)
5739 {
5740 runtimeLock.assertLocked();
5741
5742 ASSERT(cls->isRealized());
5743 // fixme nil cls?
5744 // fixme nil sel?
5745
5746 for (auto mlists = cls->data()->methods.beginLists(),
5747 end = cls->data()->methods.endLists();
5748 mlists != end;
5749 ++mlists)
5750 {
5751 // <rdar://problem/46904873> getMethodNoSuper_nolock is the hottest
5752 // caller of search_method_list; inlining it turns
5753 // getMethodNoSuper_nolock into a frameless function and eliminates
5754 // any store from this codepath.
5755 method_t *m = search_method_list_inline(*mlists, sel);
5756 if (m) return m;
5757 }
5758
5759 return nil;
5760 }
5761
5762
5763 /***********************************************************************
5764 * getMethod_nolock
5765 * fixme
5766 * Locking: runtimeLock must be read- or write-locked by the caller
5767 **********************************************************************/
5768 static method_t *
5769 getMethod_nolock(Class cls, SEL sel)
5770 {
5771 method_t *m = nil;
5772
5773 runtimeLock.assertLocked();
5774
5775 // fixme nil cls?
5776 // fixme nil sel?
5777
5778 ASSERT(cls->isRealized());
5779
5780 while (cls && ((m = getMethodNoSuper_nolock(cls, sel))) == nil) {
5781 cls = cls->superclass;
5782 }
5783
5784 return m;
5785 }
5786
5787
5788 /***********************************************************************
5789 * _class_getMethod
5790 * fixme
5791 * Locking: read-locks runtimeLock
5792 **********************************************************************/
5793 static Method _class_getMethod(Class cls, SEL sel)
5794 {
5795 mutex_locker_t lock(runtimeLock);
5796 return getMethod_nolock(cls, sel);
5797 }
5798
5799
5800 /***********************************************************************
5801 * class_getInstanceMethod. Return the instance method for the
5802 * specified class and selector.
5803 **********************************************************************/
5804 Method class_getInstanceMethod(Class cls, SEL sel)
5805 {
5806 if (!cls || !sel) return nil;
5807
5808 // This deliberately avoids +initialize because it historically did so.
5809
5810 // This implementation is a bit weird because it's the only place that
5811 // wants a Method instead of an IMP.
5812
5813 #warning fixme build and search caches
5814
5815 // Search method lists, try method resolver, etc.
5816 lookUpImpOrForward(nil, sel, cls, LOOKUP_RESOLVER);
5817
5818 #warning fixme build and search caches
5819
5820 return _class_getMethod(cls, sel);
5821 }
5822
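/*
 * Usage sketch (illustrative, not part of the runtime): fetching a Method
 * and inspecting it.
 *
 *   Method m = class_getInstanceMethod(objc_getClass("NSObject"),
 *                                      sel_registerName("description"));
 *   if (m) {
 *       IMP imp = method_getImplementation(m);
 *       const char *types = method_getTypeEncoding(m);
 *       printf("description is %p with types %s\n", (void *)imp, types);
 *   }
 */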
5823
5824 /***********************************************************************
5825 * resolveClassMethod
5826 * Call +resolveClassMethod, looking for a method to be added to class cls.
5827 * cls should be a metaclass.
5828 * Does not check if the method already exists.
5829 **********************************************************************/
5830 static void resolveClassMethod(id inst, SEL sel, Class cls)
5831 {
5832 runtimeLock.assertUnlocked();
5833 ASSERT(cls->isRealized());
5834 ASSERT(cls->isMetaClass());
5835
5836 if (!lookUpImpOrNil(inst, @selector(resolveClassMethod:), cls)) {
5837 // Resolver not implemented.
5838 return;
5839 }
5840
5841 Class nonmeta;
5842 {
5843 mutex_locker_t lock(runtimeLock);
5844 nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst);
5845 // +initialize path should have realized nonmeta already
5846 if (!nonmeta->isRealized()) {
5847 _objc_fatal("nonmeta class %s (%p) unexpectedly not realized",
5848 nonmeta->nameForLogging(), nonmeta);
5849 }
5850 }
5851 BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend;
5852 bool resolved = msg(nonmeta, @selector(resolveClassMethod:), sel);
5853
5854 // Cache the result (good or bad) so the resolver doesn't fire next time.
5855 // +resolveClassMethod adds to self->ISA() a.k.a. cls
5856 IMP imp = lookUpImpOrNil(inst, sel, cls);
5857
5858 if (resolved && PrintResolving) {
5859 if (imp) {
5860 _objc_inform("RESOLVE: method %c[%s %s] "
5861 "dynamically resolved to %p",
5862 cls->isMetaClass() ? '+' : '-',
5863 cls->nameForLogging(), sel_getName(sel), imp);
5864 }
5865 else {
5866 // Method resolver didn't add anything?
5867 _objc_inform("RESOLVE: +[%s resolveClassMethod:%s] returned YES"
5868 ", but no new implementation of %c[%s %s] was found",
5869 cls->nameForLogging(), sel_getName(sel),
5870 cls->isMetaClass() ? '+' : '-',
5871 cls->nameForLogging(), sel_getName(sel));
5872 }
5873 }
5874 }
5875
5876
5877 /***********************************************************************
5878 * resolveInstanceMethod
5879 * Call +resolveInstanceMethod, looking for a method to be added to class cls.
5880 * cls may be a metaclass or a non-meta class.
5881 * Does not check if the method already exists.
5882 **********************************************************************/
5883 static void resolveInstanceMethod(id inst, SEL sel, Class cls)
5884 {
5885 runtimeLock.assertUnlocked();
5886 ASSERT(cls->isRealized());
5887 SEL resolve_sel = @selector(resolveInstanceMethod:);
5888
5889 if (!lookUpImpOrNil(cls, resolve_sel, cls->ISA())) {
5890 // Resolver not implemented.
5891 return;
5892 }
5893
5894 BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend;
5895 bool resolved = msg(cls, resolve_sel, sel);
5896
5897 // Cache the result (good or bad) so the resolver doesn't fire next time.
5898 // +resolveInstanceMethod adds to self a.k.a. cls
5899 IMP imp = lookUpImpOrNil(inst, sel, cls);
5900
5901 if (resolved && PrintResolving) {
5902 if (imp) {
5903 _objc_inform("RESOLVE: method %c[%s %s] "
5904 "dynamically resolved to %p",
5905 cls->isMetaClass() ? '+' : '-',
5906 cls->nameForLogging(), sel_getName(sel), imp);
5907 }
5908 else {
5909 // Method resolver didn't add anything?
5910 _objc_inform("RESOLVE: +[%s resolveInstanceMethod:%s] returned YES"
5911 ", but no new implementation of %c[%s %s] was found",
5912 cls->nameForLogging(), sel_getName(sel),
5913 cls->isMetaClass() ? '+' : '-',
5914 cls->nameForLogging(), sel_getName(sel));
5915 }
5916 }
5917 }
5918
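/*
 * Usage sketch (illustrative, not part of the runtime): the client-side
 * pattern this path supports. The selector, class, and IMP names are
 * hypothetical.
 *
 *   static void HypotheticalGreetIMP(id self, SEL _cmd) {
 *       // dynamically provided implementation
 *   }
 *
 *   + (BOOL)resolveInstanceMethod:(SEL)sel {
 *       if (sel == @selector(greet)) {
 *           class_addMethod(self, sel, (IMP)HypotheticalGreetIMP, "v@:");
 *           return YES;
 *       }
 *       return [super resolveInstanceMethod:sel];
 *   }
 */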
5919
5920 /***********************************************************************
5921 * resolveMethod_locked
5922 * Call +resolveClassMethod or +resolveInstanceMethod.
5923 *
5924 * Called with the runtimeLock held to avoid pressure in the caller
5925 * Tail calls into lookUpImpOrForward, also to avoid pressure in the caller.
5926 **********************************************************************/
5927 static NEVER_INLINE IMP
5928 resolveMethod_locked(id inst, SEL sel, Class cls, int behavior)
5929 {
5930 runtimeLock.assertLocked();
5931 ASSERT(cls->isRealized());
5932
5933 runtimeLock.unlock();
5934
5935 if (! cls->isMetaClass()) {
5936 // try [cls resolveInstanceMethod:sel]
5937 resolveInstanceMethod(inst, sel, cls);
5938 }
5939 else {
5940 // try [nonMetaClass resolveClassMethod:sel]
5941 // and [cls resolveInstanceMethod:sel]
5942 resolveClassMethod(inst, sel, cls);
5943 if (!lookUpImpOrNil(inst, sel, cls)) {
5944 resolveInstanceMethod(inst, sel, cls);
5945 }
5946 }
5947
5948 // Chances are that calling the resolver has populated the cache,
5949 // so attempt to use it.
5950 return lookUpImpOrForward(inst, sel, cls, behavior | LOOKUP_CACHE);
5951 }
5952
5953
5954 /***********************************************************************
5955 * log_and_fill_cache
5956 * Log this method call. If the logger permits it, fill the method cache.
5957 * cls is the class whose cache should be filled.
5958 * implementer is the class that owns the implementation in question.
5959 **********************************************************************/
5960 static void
5961 log_and_fill_cache(Class cls, IMP imp, SEL sel, id receiver, Class implementer)
5962 {
5963 #if SUPPORT_MESSAGE_LOGGING
5964 if (slowpath(objcMsgLogEnabled && implementer)) {
5965 bool cacheIt = logMessageSend(implementer->isMetaClass(),
5966 cls->nameForLogging(),
5967 implementer->nameForLogging(),
5968 sel);
5969 if (!cacheIt) return;
5970 }
5971 #endif
5972 cache_fill(cls, sel, imp, receiver);
5973 }
5974
5975
5976 /***********************************************************************
5977 * lookUpImpOrForward.
5978 * The standard IMP lookup.
5979 * Without LOOKUP_INITIALIZE: tries to avoid +initialize (but sometimes fails)
5980 * Without LOOKUP_CACHE: skips optimistic unlocked lookup (but uses cache elsewhere)
5981 * Most callers should use LOOKUP_INITIALIZE and LOOKUP_CACHE
5982 * inst is an instance of cls or a subclass thereof, or nil if none is known.
5983 * If cls is an un-initialized metaclass then a non-nil inst is faster.
5984 * May return _objc_msgForward_impcache. IMPs destined for external use
5985 * must be converted to _objc_msgForward or _objc_msgForward_stret.
5986 * If you don't want forwarding at all, use LOOKUP_NIL.
5987 **********************************************************************/
5988 IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior)
5989 {
5990 const IMP forward_imp = (IMP)_objc_msgForward_impcache;
5991 IMP imp = nil;
5992 Class curClass;
5993
5994 runtimeLock.assertUnlocked();
5995
5996 // Optimistic cache lookup
5997 if (fastpath(behavior & LOOKUP_CACHE)) {
5998 imp = cache_getImp(cls, sel);
5999 if (imp) goto done_nolock;
6000 }
6001
6002 // runtimeLock is held during isRealized and isInitialized checking
6003 // to prevent races against concurrent realization.
6004
6005 // runtimeLock is held during method search to make
6006 // method-lookup + cache-fill atomic with respect to method addition.
6007 // Otherwise, a category could be added but ignored indefinitely because
6008 // the cache was re-filled with the old value after the cache flush on
6009 // behalf of the category.
6010
6011 runtimeLock.lock();
6012
6013 // We don't want people to be able to craft a binary blob that looks like
6014 // a class but really isn't one, and use it to mount a CFI attack.
6015 //
6016 // To make these harder we want to make sure this is a class that was
6017 // either built into the binary or legitimately registered through
6018 // objc_duplicateClass, objc_initializeClassPair or objc_allocateClassPair.
6019 //
6020 // TODO: this check is quite costly during process startup.
6021 checkIsKnownClass(cls);
6022
6023 if (slowpath(!cls->isRealized())) {
6024 cls = realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
6025 // runtimeLock may have been dropped but is now locked again
6026 }
6027
6028 if (slowpath((behavior & LOOKUP_INITIALIZE) && !cls->isInitialized())) {
6029 cls = initializeAndLeaveLocked(cls, inst, runtimeLock);
6030 // runtimeLock may have been dropped but is now locked again
6031
6032 // If sel == initialize, class_initialize will send +initialize and
6033 // then the messenger will send +initialize again after this
6034 // procedure finishes. Of course, if this is not being called
6035 // from the messenger then it won't happen. 2778172
6036 }
6037
6038 runtimeLock.assertLocked();
6039 curClass = cls;
6040
6041 // The code used to look up the class's cache again right after
6042 // taking the lock, but evidence shows that for the vast majority
6043 // of cases this is a miss, so the extra lookup was a net time loss.
6044 //
6045 // The only codepath calling into this without having performed some
6046 // kind of cache lookup is class_getInstanceMethod().
6047
6048 for (unsigned attempts = unreasonableClassCount();;) {
6049 // curClass method list.
6050 Method meth = getMethodNoSuper_nolock(curClass, sel);
6051 if (meth) {
6052 imp = meth->imp;
6053 goto done;
6054 }
6055
6056 if (slowpath((curClass = curClass->superclass) == nil)) {
6057 // No implementation found, and method resolver didn't help.
6058 // Use forwarding.
6059 imp = forward_imp;
6060 break;
6061 }
6062
6063 // Halt if there is a cycle in the superclass chain.
6064 if (slowpath(--attempts == 0)) {
6065 _objc_fatal("Memory corruption in class list.");
6066 }
6067
6068 // Superclass cache.
6069 imp = cache_getImp(curClass, sel);
6070 if (slowpath(imp == forward_imp)) {
6071 // Found a forward:: entry in a superclass.
6072 // Stop searching, but don't cache yet; call method
6073 // resolver for this class first.
6074 break;
6075 }
6076 if (fastpath(imp)) {
6077 // Found the method in a superclass. Cache it in this class.
6078 goto done;
6079 }
6080 }
6081
6082 // No implementation found. Try method resolver once.
6083
6084 if (slowpath(behavior & LOOKUP_RESOLVER)) {
6085 behavior ^= LOOKUP_RESOLVER;
6086 return resolveMethod_locked(inst, sel, cls, behavior);
6087 }
6088
6089 done:
6090 log_and_fill_cache(cls, imp, sel, inst, curClass);
6091 runtimeLock.unlock();
6092 done_nolock:
6093 if (slowpath((behavior & LOOKUP_NIL) && imp == forward_imp)) {
6094 return nil;
6095 }
6096 return imp;
6097 }
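
/*
 * Order of operations in lookUpImpOrForward above (summary, illustrative):
 *   1. optimistic unlocked cache probe (LOOKUP_CACHE)
 *   2. take runtimeLock; realize and optionally +initialize the class
 *   3. walk this class then its superclasses: method lists first, then
 *      each superclass's cache, stopping early on a cached forward:: entry
 *   4. run the method resolver once (LOOKUP_RESOLVER) and retry
 *   5. cache the result, which may be _objc_msgForward_impcache
 */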
6098
6099 /***********************************************************************
6100 * lookupMethodInClassAndLoadCache.
6101 * Like lookUpImpOrForward, but does not search superclasses.
6102 * Caches and returns objc_msgForward if the method is not found in the class.
6103 **********************************************************************/
6104 IMP lookupMethodInClassAndLoadCache(Class cls, SEL sel)
6105 {
6106 Method meth;
6107 IMP imp;
6108
6109 // fixme this is incomplete - no resolver, +initialize -
6110 // but it's only used for .cxx_construct/destruct so we don't care
6111 ASSERT(sel == SEL_cxx_construct || sel == SEL_cxx_destruct);
6112
6113 // Search cache first.
6114 imp = cache_getImp(cls, sel);
6115 if (imp) return imp;
6116
6117 // Cache miss. Search method list.
6118
6119 mutex_locker_t lock(runtimeLock);
6120
6121 meth = getMethodNoSuper_nolock(cls, sel);
6122
6123 if (meth) {
6124 // Hit in method list. Cache it.
6125 cache_fill(cls, sel, meth->imp, nil);
6126 return meth->imp;
6127 } else {
6128 // Miss in method list. Cache objc_msgForward.
6129 cache_fill(cls, sel, _objc_msgForward_impcache, nil);
6130 return _objc_msgForward_impcache;
6131 }
6132 }
6133
6134
6135 /***********************************************************************
6136 * class_getProperty
6137 * fixme
6138 * Locking: read-locks runtimeLock
6139 **********************************************************************/
6140 objc_property_t class_getProperty(Class cls, const char *name)
6141 {
6142 if (!cls || !name) return nil;
6143
6144 mutex_locker_t lock(runtimeLock);
6145
6146 checkIsKnownClass(cls);
6147
6148 ASSERT(cls->isRealized());
6149
6150 for ( ; cls; cls = cls->superclass) {
6151 for (auto& prop : cls->data()->properties) {
6152 if (0 == strcmp(name, prop.name)) {
6153 return (objc_property_t)&prop;
6154 }
6155 }
6156 }
6157
6158 return nil;
6159 }
6160
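/*
 * Usage sketch (illustrative, not part of the runtime): looking up a
 * property by name. Unlike class_copyPropertyList, the loop above walks
 * superclasses as well.
 *
 *   objc_property_t prop = class_getProperty(objc_getClass("NSOperation"), "name");
 *   if (prop) {
 *       printf("%s -> %s\n", property_getName(prop),
 *              property_getAttributes(prop));
 *   }
 */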
6161
6162 /***********************************************************************
6163 * Locking: fixme
6164 **********************************************************************/
6165
6166 Class gdb_class_getClass(Class cls)
6167 {
6168 const char *className = cls->mangledName();
6169 if(!className || !strlen(className)) return Nil;
6170 Class rCls = look_up_class(className, NO, NO);
6171 return rCls;
6172 }
6173
6174 Class gdb_object_getClass(id obj)
6175 {
6176 if (!obj) return nil;
6177 return gdb_class_getClass(obj->getIsa());
6178 }
6179
6180
6181 /***********************************************************************
6182 * Locking: write-locks runtimeLock
6183 **********************************************************************/
6184 void
6185 objc_class::setInitialized()
6186 {
6187 Class metacls;
6188 Class cls;
6189
6190 ASSERT(!isMetaClass());
6191
6192 cls = (Class)this;
6193 metacls = cls->ISA();
6194
6195 mutex_locker_t lock(runtimeLock);
6196
6197 // Special cases:
6198 // - NSObject AWZ class methods are default.
6199 // - NSObject RR class and instance methods are default.
6200 // - NSObject Core class and instance methods are default.
6201 // adjustCustomFlagsForMethodChange() also knows these special cases.
6202 // attachMethodLists() also knows these special cases.
6203
6204 objc::AWZScanner::scanInitializedClass(cls, metacls);
6205 objc::RRScanner::scanInitializedClass(cls, metacls);
6206 objc::CoreScanner::scanInitializedClass(cls, metacls);
6207
6208 // Update the +initialize flags.
6209 // Do this last.
6210 metacls->changeInfo(RW_INITIALIZED, RW_INITIALIZING);
6211 }
6212
6213
6214 void
6215 objc_class::printInstancesRequireRawIsa(bool inherited)
6216 {
6217 ASSERT(PrintRawIsa);
6218 ASSERT(instancesRequireRawIsa());
6219 _objc_inform("RAW ISA: %s%s%s", nameForLogging(),
6220 isMetaClass() ? " (meta)" : "",
6221 inherited ? " (inherited)" : "");
6222 }
6223
6224 /***********************************************************************
6225 * Mark this class and all of its subclasses as requiring raw isa pointers
6226 **********************************************************************/
6227 void objc_class::setInstancesRequireRawIsaRecursively(bool inherited)
6228 {
6229 Class cls = (Class)this;
6230 runtimeLock.assertLocked();
6231
6232 if (instancesRequireRawIsa()) return;
6233
6234 foreach_realized_class_and_subclass(cls, [=](Class c){
6235 if (c->instancesRequireRawIsa()) {
6236 return false;
6237 }
6238
6239 c->setInstancesRequireRawIsa();
6240
6241 if (PrintRawIsa) c->printInstancesRequireRawIsa(inherited || c != cls);
6242 return true;
6243 });
6244 }
6245
6246
6247 /***********************************************************************
6248 * Choose a class index.
6249 * Set instancesRequireRawIsa if no more class indexes are available.
6250 **********************************************************************/
6251 void objc_class::chooseClassArrayIndex()
6252 {
6253 #if SUPPORT_INDEXED_ISA
6254 Class cls = (Class)this;
6255 runtimeLock.assertLocked();
6256
6257 if (objc_indexed_classes_count >= ISA_INDEX_COUNT) {
6258 // No more indexes available.
6259 ASSERT(cls->classArrayIndex() == 0);
6260 cls->setInstancesRequireRawIsaRecursively(false/*not inherited*/);
6261 return;
6262 }
6263
6264 unsigned index = objc_indexed_classes_count++;
6265 if (index == 0) index = objc_indexed_classes_count++; // index 0 is unused
6266 classForIndex(index) = cls;
6267 cls->setClassArrayIndex(index);
6268 #endif
6269 }
6270
6271
6272 /***********************************************************************
6273 * Update custom RR and AWZ when a method changes its IMP
6274 **********************************************************************/
6275 static void
6276 adjustCustomFlagsForMethodChange(Class cls, method_t *meth)
6277 {
6278 objc::AWZScanner::scanChangedMethod(cls, meth);
6279 objc::RRScanner::scanChangedMethod(cls, meth);
6280 objc::CoreScanner::scanChangedMethod(cls, meth);
6281 }
6282
6283
6284 /***********************************************************************
6285 * class_getIvarLayout
6286 * Called by the garbage collector.
6287 * The class must be nil or already realized.
6288 * Locking: none
6289 **********************************************************************/
6290 const uint8_t *
6291 class_getIvarLayout(Class cls)
6292 {
6293 if (cls) return cls->data()->ro->ivarLayout;
6294 else return nil;
6295 }
6296
6297
6298 /***********************************************************************
6299 * class_getWeakIvarLayout
6300 * Called by the garbage collector.
6301 * The class must be nil or already realized.
6302 * Locking: none
6303 **********************************************************************/
6304 const uint8_t *
6305 class_getWeakIvarLayout(Class cls)
6306 {
6307 if (cls) return cls->data()->ro->weakIvarLayout;
6308 else return nil;
6309 }
6310
6311
6312 /***********************************************************************
6313 * class_setIvarLayout
6314 * Changes the class's ivar layout.
6315 * nil layout means no unscanned ivars
6316 * The class must be under construction.
6317 * fixme: sanity-check layout vs instance size?
6318 * fixme: sanity-check layout vs superclass?
6319 * Locking: acquires runtimeLock
6320 **********************************************************************/
6321 void
6322 class_setIvarLayout(Class cls, const uint8_t *layout)
6323 {
6324 if (!cls) return;
6325
6326 mutex_locker_t lock(runtimeLock);
6327
6328 checkIsKnownClass(cls);
6329
6330 // Can only change layout of in-construction classes.
6331 // note: if modifications to post-construction classes were
6332 // allowed, there would be a race below (us vs. concurrent object_setIvar)
6333 if (!(cls->data()->flags & RW_CONSTRUCTING)) {
6334 _objc_inform("*** Can't set ivar layout for already-registered "
6335 "class '%s'", cls->nameForLogging());
6336 return;
6337 }
6338
6339 class_ro_t *ro_w = make_ro_writeable(cls->data());
6340
6341 try_free(ro_w->ivarLayout);
6342 ro_w->ivarLayout = ustrdupMaybeNil(layout);
6343 }
6344
6345
6346 /***********************************************************************
6347 * class_setWeakIvarLayout
6348 * Changes the class's weak ivar layout.
6349 * nil layout means no weak ivars
6350 * The class must be under construction.
6351 * fixme: sanity-check layout vs instance size?
6352 * fixme: sanity-check layout vs superclass?
6353 * Locking: acquires runtimeLock
6354 **********************************************************************/
6355 void
6356 class_setWeakIvarLayout(Class cls, const uint8_t *layout)
6357 {
6358 if (!cls) return;
6359
6360 mutex_locker_t lock(runtimeLock);
6361
6362 checkIsKnownClass(cls);
6363
6364 // Can only change layout of in-construction classes.
6365 // note: if modifications to post-construction classes were
6366 // allowed, there would be a race below (us vs. concurrent object_setIvar)
6367 if (!(cls->data()->flags & RW_CONSTRUCTING)) {
6368 _objc_inform("*** Can't set weak ivar layout for already-registered "
6369 "class '%s'", cls->nameForLogging());
6370 return;
6371 }
6372
6373 class_ro_t *ro_w = make_ro_writeable(cls->data());
6374
6375 try_free(ro_w->weakIvarLayout);
6376 ro_w->weakIvarLayout = ustrdupMaybeNil(layout);
6377 }
6378
6379
6380 /***********************************************************************
6381 * getIvar
6382 * Look up an ivar by name.
6383 * Locking: runtimeLock must be read- or write-locked by the caller.
6384 **********************************************************************/
6385 static ivar_t *getIvar(Class cls, const char *name)
6386 {
6387 runtimeLock.assertLocked();
6388
6389 const ivar_list_t *ivars;
6390 ASSERT(cls->isRealized());
6391 if ((ivars = cls->data()->ro->ivars)) {
6392 for (auto& ivar : *ivars) {
6393 if (!ivar.offset) continue; // anonymous bitfield
6394
6395 // ivar.name may be nil for anonymous bitfields etc.
6396 if (ivar.name && 0 == strcmp(name, ivar.name)) {
6397 return &ivar;
6398 }
6399 }
6400 }
6401
6402 return nil;
6403 }
6404
6405
6406 /***********************************************************************
6407 * _class_getClassForIvar
6408 * Given a class and an ivar that is in it or one of its superclasses,
6409 * find the actual class that defined the ivar.
6410 **********************************************************************/
6411 Class _class_getClassForIvar(Class cls, Ivar ivar)
6412 {
6413 mutex_locker_t lock(runtimeLock);
6414
6415 for ( ; cls; cls = cls->superclass) {
6416 if (auto ivars = cls->data()->ro->ivars) {
6417 if (ivars->containsIvar(ivar)) {
6418 return cls;
6419 }
6420 }
6421 }
6422
6423 return nil;
6424 }
6425
6426
6427 /***********************************************************************
6428 * _class_getVariable
6429 * fixme
6430 * Locking: read-locks runtimeLock
6431 **********************************************************************/
6432 Ivar
6433 _class_getVariable(Class cls, const char *name)
6434 {
6435 mutex_locker_t lock(runtimeLock);
6436
6437 for ( ; cls; cls = cls->superclass) {
6438 ivar_t *ivar = getIvar(cls, name);
6439 if (ivar) {
6440 return ivar;
6441 }
6442 }
6443
6444 return nil;
6445 }
6446
6447
6448 /***********************************************************************
6449 * class_conformsToProtocol
6450 * fixme
6451 * Locking: read-locks runtimeLock
6452 **********************************************************************/
6453 BOOL class_conformsToProtocol(Class cls, Protocol *proto_gen)
6454 {
6455 protocol_t *proto = newprotocol(proto_gen);
6456
6457 if (!cls) return NO;
6458 if (!proto_gen) return NO;
6459
6460 mutex_locker_t lock(runtimeLock);
6461
6462 checkIsKnownClass(cls);
6463
6464 ASSERT(cls->isRealized());
6465
6466 for (const auto& proto_ref : cls->data()->protocols) {
6467 protocol_t *p = remapProtocol(proto_ref);
6468 if (p == proto || protocol_conformsToProtocol_nolock(p, proto)) {
6469 return YES;
6470 }
6471 }
6472
6473 return NO;
6474 }
6475
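/*
 * Usage sketch (illustrative, not part of the runtime). Note that the
 * loop above only checks the class's own protocol list, so conformance
 * inherited from a superclass is not reported; NSObject's
 * -conformsToProtocol: walks the superclass chain as well.
 *
 *   BOOL copies = class_conformsToProtocol(objc_getClass("NSString"),
 *                                          objc_getProtocol("NSCopying"));
 */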
6476
6477 /**********************************************************************
6478 * addMethod
6479 * fixme
6480 * Locking: runtimeLock must be held by the caller
6481 **********************************************************************/
6482 static IMP
6483 addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace)
6484 {
6485 IMP result = nil;
6486
6487 runtimeLock.assertLocked();
6488
6489 checkIsKnownClass(cls);
6490
6491 ASSERT(types);
6492 ASSERT(cls->isRealized());
6493
6494 method_t *m;
6495 if ((m = getMethodNoSuper_nolock(cls, name))) {
6496 // already exists
6497 if (!replace) {
6498 result = m->imp;
6499 } else {
6500 result = _method_setImplementation(cls, m, imp);
6501 }
6502 } else {
6503 // fixme optimize
6504 method_list_t *newlist;
6505 newlist = (method_list_t *)calloc(sizeof(*newlist), 1);
6506 newlist->entsizeAndFlags =
6507 (uint32_t)sizeof(method_t) | fixed_up_method_list;
6508 newlist->count = 1;
6509 newlist->first.name = name;
6510 newlist->first.types = strdupIfMutable(types);
6511 newlist->first.imp = imp;
6512
6513 prepareMethodLists(cls, &newlist, 1, NO, NO);
6514 cls->data()->methods.attachLists(&newlist, 1);
6515 flushCaches(cls);
6516
6517 result = nil;
6518 }
6519
6520 return result;
6521 }
6522
6523 /**********************************************************************
6524 * addMethods
6525 * Add the given methods to a class in bulk.
6526 * Returns the selectors which could not be added, when replace == NO and a
6527 * method already exists. The returned selectors are NULL terminated and must be
6528 * freed by the caller. They are NULL if no failures occurred.
6529 * Locking: runtimeLock must be held by the caller
6530 **********************************************************************/
6531 static SEL *
6532 addMethods(Class cls, const SEL *names, const IMP *imps, const char **types,
6533 uint32_t count, bool replace, uint32_t *outFailedCount)
6534 {
6535 runtimeLock.assertLocked();
6536
6537 ASSERT(names);
6538 ASSERT(imps);
6539 ASSERT(types);
6540 ASSERT(cls->isRealized());
6541
6542 method_list_t *newlist;
6543 size_t newlistSize = method_list_t::byteSize(sizeof(method_t), count);
6544 newlist = (method_list_t *)calloc(newlistSize, 1);
6545 newlist->entsizeAndFlags =
6546 (uint32_t)sizeof(method_t) | fixed_up_method_list;
6547 newlist->count = 0;
6548
6549 method_t *newlistMethods = &newlist->first;
6550
6551 SEL *failedNames = nil;
6552 uint32_t failedCount = 0;
6553
6554 for (uint32_t i = 0; i < count; i++) {
6555 method_t *m;
6556 if ((m = getMethodNoSuper_nolock(cls, names[i]))) {
6557 // already exists
6558 if (!replace) {
6559 // report failure
6560 if (failedNames == nil) {
6561 // allocate an extra entry for a trailing NULL in case
6562 // every method fails
6563 failedNames = (SEL *)calloc(sizeof(*failedNames),
6564 count + 1);
6565 }
6566 failedNames[failedCount] = m->name;
6567 failedCount++;
6568 } else {
6569 _method_setImplementation(cls, m, imps[i]);
6570 }
6571 } else {
6572 method_t *newmethod = &newlistMethods[newlist->count];
6573 newmethod->name = names[i];
6574 newmethod->types = strdupIfMutable(types[i]);
6575 newmethod->imp = imps[i];
6576 newlist->count++;
6577 }
6578 }
6579
6580 if (newlist->count > 0) {
6581 // fixme resize newlist because it may have been over-allocated above.
6582 // Note that realloc() alone doesn't work due to ptrauth.
6583
6584 method_t::SortBySELAddress sorter;
6585 std::stable_sort(newlist->begin(), newlist->end(), sorter);
6586
6587 prepareMethodLists(cls, &newlist, 1, NO, NO);
6588 cls->data()->methods.attachLists(&newlist, 1);
6589 flushCaches(cls);
6590 } else {
6591 // Attaching the method list to the class consumes it. If we don't
6592 // do that, we have to free the memory ourselves.
6593 free(newlist);
6594 }
6595
6596 if (outFailedCount) *outFailedCount = failedCount;
6597
6598 return failedNames;
6599 }
6600
6601
6602 BOOL
6603 class_addMethod(Class cls, SEL name, IMP imp, const char *types)
6604 {
6605 if (!cls) return NO;
6606
6607 mutex_locker_t lock(runtimeLock);
6608 return ! addMethod(cls, name, imp, types ?: "", NO);
6609 }
6610
6611
6612 IMP
6613 class_replaceMethod(Class cls, SEL name, IMP imp, const char *types)
6614 {
6615 if (!cls) return nil;
6616
6617 mutex_locker_t lock(runtimeLock);
6618 return addMethod(cls, name, imp, types ?: "", YES);
6619 }
6620
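/*
 * Usage sketch (illustrative, not part of the runtime): adding a method
 * whose IMP is a plain C function. The selector and function are
 * hypothetical.
 *
 *   static void HypotheticalPingIMP(id self, SEL _cmd) { }
 *
 *   // Fails (returns NO) if the class itself already implements -ping.
 *   class_addMethod(objc_getClass("NSObject"), sel_registerName("ping"),
 *                   (IMP)HypotheticalPingIMP, "v@:");
 *
 *   // Replaces (or adds) unconditionally, returning any previous IMP.
 *   IMP previous = class_replaceMethod(objc_getClass("NSObject"),
 *                                      sel_registerName("ping"),
 *                                      (IMP)HypotheticalPingIMP, "v@:");
 */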
6621
6622 SEL *
6623 class_addMethodsBulk(Class cls, const SEL *names, const IMP *imps,
6624 const char **types, uint32_t count,
6625 uint32_t *outFailedCount)
6626 {
6627 if (!cls) {
6628 if (outFailedCount) *outFailedCount = count;
6629 return (SEL *)memdup(names, count * sizeof(*names));
6630 }
6631
6632 mutex_locker_t lock(runtimeLock);
6633 return addMethods(cls, names, imps, types, count, NO, outFailedCount);
6634 }
6635
6636 void
6637 class_replaceMethodsBulk(Class cls, const SEL *names, const IMP *imps,
6638 const char **types, uint32_t count)
6639 {
6640 if (!cls) return;
6641
6642 mutex_locker_t lock(runtimeLock);
6643 addMethods(cls, names, imps, types, count, YES, nil);
6644 }
6645
6646
6647 /***********************************************************************
6648 * class_addIvar
6649 * Adds an ivar to a class.
6650 * Locking: acquires runtimeLock
6651 **********************************************************************/
6652 BOOL
6653 class_addIvar(Class cls, const char *name, size_t size,
6654 uint8_t alignment, const char *type)
6655 {
6656 if (!cls) return NO;
6657
6658 if (!type) type = "";
6659 if (name && 0 == strcmp(name, "")) name = nil;
6660
6661 mutex_locker_t lock(runtimeLock);
6662
6663 checkIsKnownClass(cls);
6664 ASSERT(cls->isRealized());
6665
6666 // No class variables
6667 if (cls->isMetaClass()) {
6668 return NO;
6669 }
6670
6671 // Can only add ivars to in-construction classes.
6672 if (!(cls->data()->flags & RW_CONSTRUCTING)) {
6673 return NO;
6674 }
6675
6676 // Check for existing ivar with this name, unless it's anonymous.
6677 // Check for too-big ivar.
6678 // fixme check for superclass ivar too?
6679 if ((name && getIvar(cls, name)) || size > UINT32_MAX) {
6680 return NO;
6681 }
6682
6683 class_ro_t *ro_w = make_ro_writeable(cls->data());
6684
6685 // fixme allocate less memory here
6686
6687 ivar_list_t *oldlist, *newlist;
6688 if ((oldlist = (ivar_list_t *)cls->data()->ro->ivars)) {
6689 size_t oldsize = oldlist->byteSize();
6690 newlist = (ivar_list_t *)calloc(oldsize + oldlist->entsize(), 1);
6691 memcpy(newlist, oldlist, oldsize);
6692 free(oldlist);
6693 } else {
6694 newlist = (ivar_list_t *)calloc(sizeof(ivar_list_t), 1);
6695 newlist->entsizeAndFlags = (uint32_t)sizeof(ivar_t);
6696 }
6697
6698 uint32_t offset = cls->unalignedInstanceSize();
6699 uint32_t alignMask = (1<<alignment)-1;
6700 offset = (offset + alignMask) & ~alignMask;
6701
6702 ivar_t& ivar = newlist->get(newlist->count++);
6703 #if __x86_64__
6704 // Deliberately over-allocate the ivar offset variable.
6705 // Use calloc() to clear all 64 bits. See the note in struct ivar_t.
6706 ivar.offset = (int32_t *)(int64_t *)calloc(sizeof(int64_t), 1);
6707 #else
6708 ivar.offset = (int32_t *)malloc(sizeof(int32_t));
6709 #endif
6710 *ivar.offset = offset;
6711 ivar.name = name ? strdupIfMutable(name) : nil;
6712 ivar.type = strdupIfMutable(type);
6713 ivar.alignment_raw = alignment;
6714 ivar.size = (uint32_t)size;
6715
6716 ro_w->ivars = newlist;
6717 cls->setInstanceSize((uint32_t)(offset + size));
6718
6719 // Ivar layout updated in registerClass.
6720
6721 return YES;
6722 }
6723
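/*
 * Usage sketch (illustrative, not part of the runtime): ivars can only be
 * added between objc_allocateClassPair and objc_registerClassPair,
 * matching the RW_CONSTRUCTING check above. The class name is
 * hypothetical; the alignment argument is log2 of the required alignment.
 *
 *   Class cls = objc_allocateClassPair(objc_getClass("NSObject"),
 *                                      "HypotheticalBox", 0);
 *   class_addIvar(cls, "_value", sizeof(int), (uint8_t)log2(sizeof(int)),
 *                 @encode(int));
 *   objc_registerClassPair(cls);
 */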
6724
6725 /***********************************************************************
6726 * class_addProtocol
6727 * Adds a protocol to a class.
6728 * Locking: acquires runtimeLock
6729 **********************************************************************/
6730 BOOL class_addProtocol(Class cls, Protocol *protocol_gen)
6731 {
6732 protocol_t *protocol = newprotocol(protocol_gen);
6733
6734 if (!cls) return NO;
6735 if (class_conformsToProtocol(cls, protocol_gen)) return NO;
6736
6737 mutex_locker_t lock(runtimeLock);
6738
6739 ASSERT(cls->isRealized());
6740
6741 // fixme optimize
6742 protocol_list_t *protolist = (protocol_list_t *)
6743 malloc(sizeof(protocol_list_t) + sizeof(protocol_t *));
6744 protolist->count = 1;
6745 protolist->list[0] = (protocol_ref_t)protocol;
6746
6747 cls->data()->protocols.attachLists(&protolist, 1);
6748
6749 // fixme metaclass?
6750
6751 return YES;
6752 }
6753
6754
6755 /***********************************************************************
6756 * class_addProperty
6757 * Adds a property to a class.
6758 * Locking: acquires runtimeLock
6759 **********************************************************************/
6760 static bool
6761 _class_addProperty(Class cls, const char *name,
6762 const objc_property_attribute_t *attrs, unsigned int count,
6763 bool replace)
6764 {
6765 if (!cls) return NO;
6766 if (!name) return NO;
6767
6768 property_t *prop = class_getProperty(cls, name);
6769 if (prop && !replace) {
6770 // already exists, refuse to replace
6771 return NO;
6772 }
6773 else if (prop) {
6774 // replace existing
6775 mutex_locker_t lock(runtimeLock);
6776 try_free(prop->attributes);
6777 prop->attributes = copyPropertyAttributeString(attrs, count);
6778 return YES;
6779 }
6780 else {
6781 mutex_locker_t lock(runtimeLock);
6782
6783 ASSERT(cls->isRealized());
6784
6785 property_list_t *proplist = (property_list_t *)
6786 malloc(sizeof(*proplist));
6787 proplist->count = 1;
6788 proplist->entsizeAndFlags = sizeof(proplist->first);
6789 proplist->first.name = strdupIfMutable(name);
6790 proplist->first.attributes = copyPropertyAttributeString(attrs, count);
6791
6792 cls->data()->properties.attachLists(&proplist, 1);
6793
6794 return YES;
6795 }
6796 }
6797
6798 BOOL
6799 class_addProperty(Class cls, const char *name,
6800 const objc_property_attribute_t *attrs, unsigned int n)
6801 {
6802 return _class_addProperty(cls, name, attrs, n, NO);
6803 }
6804
6805 void
6806 class_replaceProperty(Class cls, const char *name,
6807 const objc_property_attribute_t *attrs, unsigned int n)
6808 {
6809 _class_addProperty(cls, name, attrs, n, YES);
6810 }
6811
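/*
 * Usage sketch (illustrative, not part of the runtime): declaring a copy,
 * nonatomic NSString property named "title" backed by a hypothetical
 * "_title" ivar on some class `cls`.
 *
 *   objc_property_attribute_t attrs[] = {
 *       { "T", "@\"NSString\"" },   // type
 *       { "C", "" },                // copy
 *       { "N", "" },                // nonatomic
 *       { "V", "_title" },          // backing ivar
 *   };
 *   class_addProperty(cls, "title", attrs, 4);
 */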
6812
6813 /***********************************************************************
6814 * look_up_class
6815 * Look up a class by name, and realize it.
6816 * Locking: acquires runtimeLock
6817 **********************************************************************/
6818 static BOOL empty_getClass(const char *name, Class *outClass)
6819 {
6820 *outClass = nil;
6821 return NO;
6822 }
6823
6824 static ChainedHookFunction<objc_hook_getClass> GetClassHook{empty_getClass};
6825
6826 void objc_setHook_getClass(objc_hook_getClass newValue,
6827 objc_hook_getClass *outOldValue)
6828 {
6829 GetClassHook.set(newValue, outOldValue);
6830 }
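
/*
 * Usage sketch (illustrative, not part of the runtime): installing a
 * getClass hook that chains to the previous one, which is the pattern
 * ChainedHookFunction expects. The hook and the class name it handles
 * are hypothetical.
 *
 *   static objc_hook_getClass OldGetClass;
 *
 *   static BOOL MyGetClass(const char *name, Class *outClass) {
 *       if (0 == strcmp(name, "HypotheticalLazyClass")) {
 *           *outClass = ...;   // provide an already-realized class here
 *           return YES;
 *       }
 *       return OldGetClass(name, outClass);
 *   }
 *
 *   objc_setHook_getClass(MyGetClass, &OldGetClass);
 */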
6831
6832 Class
6833 look_up_class(const char *name,
6834 bool includeUnconnected __attribute__((unused)),
6835 bool includeClassHandler __attribute__((unused)))
6836 {
6837 if (!name) return nil;
6838
6839 Class result;
6840 bool unrealized;
6841 {
6842 runtimeLock.lock();
6843 result = getClassExceptSomeSwift(name);
6844 unrealized = result && !result->isRealized();
6845 if (unrealized) {
6846 result = realizeClassMaybeSwiftAndUnlock(result, runtimeLock);
6847 // runtimeLock is now unlocked
6848 } else {
6849 runtimeLock.unlock();
6850 }
6851 }
6852
6853 if (!result) {
6854 // Ask Swift about its un-instantiated classes.
6855
6856 // We use thread-local storage to prevent infinite recursion
6857 // if the hook function provokes another lookup of the same name
6858 // (for example, if the hook calls objc_allocateClassPair)
6859
6860 auto *tls = _objc_fetch_pthread_data(true);
6861
6862 // Stop if this thread is already looking up this name.
6863 for (unsigned i = 0; i < tls->classNameLookupsUsed; i++) {
6864 if (0 == strcmp(name, tls->classNameLookups[i])) {
6865 return nil;
6866 }
6867 }
6868
6869 // Save this lookup in tls.
6870 if (tls->classNameLookupsUsed == tls->classNameLookupsAllocated) {
6871 tls->classNameLookupsAllocated =
6872 (tls->classNameLookupsAllocated * 2 ?: 1);
6873 size_t size = tls->classNameLookupsAllocated *
6874 sizeof(tls->classNameLookups[0]);
6875 tls->classNameLookups = (const char **)
6876 realloc(tls->classNameLookups, size);
6877 }
6878 tls->classNameLookups[tls->classNameLookupsUsed++] = name;
6879
6880 // Call the hook.
6881 Class swiftcls = nil;
6882 if (GetClassHook.get()(name, &swiftcls)) {
6883 ASSERT(swiftcls->isRealized());
6884 result = swiftcls;
6885 }
6886
6887 // Erase the name from tls.
6888 unsigned slot = --tls->classNameLookupsUsed;
6889 ASSERT(slot >= 0 && slot < tls->classNameLookupsAllocated);
6890 ASSERT(name == tls->classNameLookups[slot]);
6891 tls->classNameLookups[slot] = nil;
6892 }
6893
6894 return result;
6895 }
6896
6897
6898 /***********************************************************************
6899 * objc_duplicateClass
6900 * fixme
6901 * Locking: acquires runtimeLock
6902 **********************************************************************/
6903 Class
6904 objc_duplicateClass(Class original, const char *name,
6905 size_t extraBytes)
6906 {
6907 Class duplicate;
6908
6909 mutex_locker_t lock(runtimeLock);
6910
6911 checkIsKnownClass(original);
6912
6913 ASSERT(original->isRealized());
6914 ASSERT(!original->isMetaClass());
6915
6916 duplicate = alloc_class_for_subclass(original, extraBytes);
6917
6918 duplicate->initClassIsa(original->ISA());
6919 duplicate->superclass = original->superclass;
6920
6921 duplicate->cache.initializeToEmpty();
6922
6923 class_rw_t *rw = (class_rw_t *)calloc(sizeof(*original->data()), 1);
6924 rw->flags = (original->data()->flags | RW_COPIED_RO | RW_REALIZING);
6925 rw->version = original->data()->version;
6926 rw->firstSubclass = nil;
6927 rw->nextSiblingClass = nil;
6928
6929 duplicate->bits = original->bits;
6930 duplicate->setData(rw);
6931
6932 rw->ro = original->data()->ro->duplicate();
6933 *(char **)&rw->ro->name = strdupIfMutable(name);
6934
6935 rw->methods = original->data()->methods.duplicate();
6936
6937 // fixme dies when categories are added to the base
6938 rw->properties = original->data()->properties;
6939 rw->protocols = original->data()->protocols;
6940
6941 duplicate->chooseClassArrayIndex();
6942
6943 if (duplicate->superclass) {
6944 addSubclass(duplicate->superclass, duplicate);
6945 // duplicate->isa == original->isa so don't addSubclass() for it
6946 } else {
6947 addRootClass(duplicate);
6948 }
6949
6950 // Don't methodize class - construction above is correct
6951
6952 addNamedClass(duplicate, duplicate->data()->ro->name);
6953 addClassTableEntry(duplicate, /*addMeta=*/false);
6954
6955 if (PrintConnecting) {
6956 _objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p",
6957 name, original->nameForLogging(),
6958 (void*)duplicate, duplicate->data()->ro);
6959 }
6960
6961 duplicate->clearInfo(RW_REALIZING);
6962
6963 return duplicate;
6964 }
6965
6966 /***********************************************************************
6967 * objc_initializeClassPair
6968 * Locking: runtimeLock must be write-locked by the caller
6969 **********************************************************************/
6970
6971 // &UnsetLayout is the default ivar layout during class construction
6972 static const uint8_t UnsetLayout = 0;
6973
6974 static void objc_initializeClassPair_internal(Class superclass, const char *name, Class cls, Class meta)
6975 {
6976 runtimeLock.assertLocked();
6977
6978 class_ro_t *cls_ro_w, *meta_ro_w;
6979
6980 cls->setData((class_rw_t *)calloc(sizeof(class_rw_t), 1));
6981 meta->setData((class_rw_t *)calloc(sizeof(class_rw_t), 1));
6982 cls_ro_w = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
6983 meta_ro_w = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
6984 cls->data()->ro = cls_ro_w;
6985 meta->data()->ro = meta_ro_w;
6986
6987 // Set basic info
6988
6989 cls->data()->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
6990 meta->data()->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
6991 cls->data()->version = 0;
6992 meta->data()->version = 7;
6993
6994 cls_ro_w->flags = 0;
6995 meta_ro_w->flags = RO_META;
6996 if (!superclass) {
6997 cls_ro_w->flags |= RO_ROOT;
6998 meta_ro_w->flags |= RO_ROOT;
6999 }
7000 if (superclass) {
7001 uint32_t flagsToCopy = RW_FORBIDS_ASSOCIATED_OBJECTS;
7002 cls->data()->flags |= superclass->data()->flags & flagsToCopy;
7003 cls_ro_w->instanceStart = superclass->unalignedInstanceSize();
7004 meta_ro_w->instanceStart = superclass->ISA()->unalignedInstanceSize();
7005 cls->setInstanceSize(cls_ro_w->instanceStart);
7006 meta->setInstanceSize(meta_ro_w->instanceStart);
7007 } else {
7008 cls_ro_w->instanceStart = 0;
7009 meta_ro_w->instanceStart = (uint32_t)sizeof(objc_class);
7010 cls->setInstanceSize((uint32_t)sizeof(id)); // just an isa
7011 meta->setInstanceSize(meta_ro_w->instanceStart);
7012 }
7013
7014 cls_ro_w->name = strdupIfMutable(name);
7015 meta_ro_w->name = strdupIfMutable(name);
7016
7017 cls_ro_w->ivarLayout = &UnsetLayout;
7018 cls_ro_w->weakIvarLayout = &UnsetLayout;
7019
7020 meta->chooseClassArrayIndex();
7021 cls->chooseClassArrayIndex();
7022
7023 // This absolutely needs to be done before addSubclass
7024 // as initializeToEmpty() clobbers the FAST_CACHE bits
7025 cls->cache.initializeToEmpty();
7026 meta->cache.initializeToEmpty();
7027
7028 #if FAST_CACHE_META
7029 meta->cache.setBit(FAST_CACHE_META);
7030 #endif
7031 meta->setInstancesRequireRawIsa();
7032
7033 // Connect to superclasses and metaclasses
7034 cls->initClassIsa(meta);
7035
7036 if (superclass) {
7037 meta->initClassIsa(superclass->ISA()->ISA());
7038 cls->superclass = superclass;
7039 meta->superclass = superclass->ISA();
7040 addSubclass(superclass, cls);
7041 addSubclass(superclass->ISA(), meta);
7042 } else {
7043 meta->initClassIsa(meta);
7044 cls->superclass = Nil;
7045 meta->superclass = cls;
7046 addRootClass(cls);
7047 addSubclass(cls, meta);
7048 }
7049
7050 addClassTableEntry(cls);
7051 }
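
// A minimal sketch (illustrative) of the isa/superclass topology produced by
// the root-class branch above when superclass == Nil: the metaclass is its own
// metaclass and its superclass is the new class itself. "DemoRoot" is a
// hypothetical name.
#if 0
#include <objc/runtime.h>
#include <assert.h>

static void RootClassTopologyDemo(void)
{
    Class root = objc_allocateClassPair(Nil, "DemoRoot", 0);
    Class meta = object_getClass((id)root);

    assert(class_getSuperclass(root) == Nil);    // cls->superclass = Nil
    assert(class_getSuperclass(meta) == root);   // meta->superclass = cls
    assert(object_getClass((id)meta) == meta);   // meta->initClassIsa(meta)

    // (The pair is intentionally not registered or disposed in this sketch.)
}
#endif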
7052
7053
7054 /***********************************************************************
7055 * verifySuperclass
7056 * Sanity-check the superclass provided to
7057 * objc_allocateClassPair, objc_initializeClassPair, or objc_readClassPair.
7058 **********************************************************************/
7059 bool
7060 verifySuperclass(Class superclass, bool rootOK)
7061 {
7062 if (!superclass) {
7063 // Superclass does not exist.
7064 // If subclass may be a root class, this is OK.
7065 // If subclass must not be a root class, this is bad.
7066 return rootOK;
7067 }
7068
7069 // Superclass must be realized.
7070 if (! superclass->isRealized()) return false;
7071
7072 // Superclass must not be under construction.
7073 if (superclass->data()->flags & RW_CONSTRUCTING) return false;
7074
7075 return true;
7076 }
7077
7078
7079 /***********************************************************************
7080 * objc_initializeClassPair
7081 **********************************************************************/
7082 Class objc_initializeClassPair(Class superclass, const char *name, Class cls, Class meta)
7083 {
7084 // Fail if the class name is in use.
7085 if (look_up_class(name, NO, NO)) return nil;
7086
7087 mutex_locker_t lock(runtimeLock);
7088
7089 // Fail if the class name is in use.
7090 // Fail if the superclass isn't kosher.
7091 if (getClassExceptSomeSwift(name) ||
7092 !verifySuperclass(superclass, true/*rootOK*/))
7093 {
7094 return nil;
7095 }
7096
7097 objc_initializeClassPair_internal(superclass, name, cls, meta);
7098
7099 return cls;
7100 }
7101
7102
7103 /***********************************************************************
7104 * objc_allocateClassPair
7105 * fixme
7106 * Locking: acquires runtimeLock
7107 **********************************************************************/
7108 Class objc_allocateClassPair(Class superclass, const char *name,
7109 size_t extraBytes)
7110 {
7111 Class cls, meta;
7112
7113 // Fail if the class name is in use.
7114 if (look_up_class(name, NO, NO)) return nil;
7115
7116 mutex_locker_t lock(runtimeLock);
7117
7118 // Fail if the class name is in use.
7119 // Fail if the superclass isn't kosher.
7120 if (getClassExceptSomeSwift(name) ||
7121 !verifySuperclass(superclass, true/*rootOK*/))
7122 {
7123 return nil;
7124 }
7125
7126 // Allocate new classes.
7127 cls = alloc_class_for_subclass(superclass, extraBytes);
7128 meta = alloc_class_for_subclass(superclass, extraBytes);
7129
7130 // fixme mangle the name if it looks swift-y?
7131 objc_initializeClassPair_internal(superclass, name, cls, meta);
7132
7133 return cls;
7134 }
7135
7136
7137 /***********************************************************************
7138 * objc_registerClassPair
7139 * fixme
7140 * Locking: acquires runtimeLock
7141 **********************************************************************/
7142 void objc_registerClassPair(Class cls)
7143 {
7144 mutex_locker_t lock(runtimeLock);
7145
7146 checkIsKnownClass(cls);
7147
7148 if ((cls->data()->flags & RW_CONSTRUCTED) ||
7149 (cls->ISA()->data()->flags & RW_CONSTRUCTED))
7150 {
7151 _objc_inform("objc_registerClassPair: class '%s' was already "
7152 "registered!", cls->data()->ro->name);
7153 return;
7154 }
7155
7156 if (!(cls->data()->flags & RW_CONSTRUCTING) ||
7157 !(cls->ISA()->data()->flags & RW_CONSTRUCTING))
7158 {
7159 _objc_inform("objc_registerClassPair: class '%s' was not "
7160 "allocated with objc_allocateClassPair!",
7161 cls->data()->ro->name);
7162 return;
7163 }
7164
7165 // Clear "under construction" bit, set "done constructing" bit
7166 cls->ISA()->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
7167 cls->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
7168
7169 // Add to named class table.
7170 addNamedClass(cls, cls->data()->ro->name);
7171 }
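
// Usage sketch (illustrative): the canonical allocate -> customize -> register
// flow. Ivars can only be added between objc_allocateClassPair() and
// objc_registerClassPair(), while RW_CONSTRUCTING is still set; methods can be
// added at any time. "DemoBox", DemoBoxGetValue, and the "_value" ivar are
// hypothetical, and the sketch assumes manual retain/release.
#if 0
#include <objc/runtime.h>
#include <objc/message.h>
#include <objc/NSObject.h>
#include <assert.h>
#include <stdint.h>

static int DemoBoxGetValue(id self, SEL _cmd)
{
    Ivar ivar = class_getInstanceVariable(object_getClass(self), "_value");
    return *(int *)((uint8_t *)self + ivar_getOffset(ivar));
}

static void DemoBoxClassDemo(void)
{
    Class cls = objc_allocateClassPair([NSObject class], "DemoBox", 0);

    class_addIvar(cls, "_value", sizeof(int), 2 /* log2(alignment) */, @encode(int));
    class_addMethod(cls, sel_registerName("value"), (IMP)DemoBoxGetValue, "i@:");

    objc_registerClassPair(cls);

    id box = [[cls alloc] init];
    Ivar ivar = class_getInstanceVariable(cls, "_value");
    *(int *)((uint8_t *)box + ivar_getOffset(ivar)) = 42;
    assert(((int (*)(id, SEL))objc_msgSend)(box, sel_registerName("value")) == 42);
    [box release];
}
#endif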
7172
7173
7174 /***********************************************************************
7175 * objc_readClassPair()
7176 * Read a class and metaclass as written by a compiler.
7177 * Assumes the class and metaclass are not referenced by other things
7178 * that might need to be fixed up (such as categories and subclasses).
7179 * Does not call +load.
7180 * Returns the class pointer, or nil.
7181 *
7182 * Locking: runtimeLock acquired by map_images
7183 **********************************************************************/
7184 Class objc_readClassPair(Class bits, const struct objc_image_info *info)
7185 {
7186 mutex_locker_t lock(runtimeLock);
7187
7188 // No info bits are significant yet.
7189 (void)info;
7190
7191 // Fail if the superclass isn't kosher.
7192 bool rootOK = bits->data()->flags & RO_ROOT;
7193 if (!verifySuperclass(bits->superclass, rootOK)) {
7194 return nil;
7195 }
7196
7197 // Duplicate classes are allowed, just like they are for image loading.
7198 // readClass will complain about the duplicate.
7199
7200 Class cls = readClass(bits, false/*bundle*/, false/*shared cache*/);
7201 if (cls != bits) {
7202 // This function isn't allowed to remap anything.
7203 _objc_fatal("objc_readClassPair for class %s changed %p to %p",
7204 cls->nameForLogging(), bits, cls);
7205 }
7206
7207 // The only client of this function is old Swift.
7208 // Stable Swift won't use it.
7209 // fixme once Swift in the OS settles we can assert(!cls->isSwiftStable()).
7210 cls = realizeClassWithoutSwift(cls, nil);
7211
7212 return cls;
7213 }
7214
7215
7216 /***********************************************************************
7217 * detach_class
7218 * Disconnect a class from other data structures.
7219 * Exception: does not remove the class from the +load list
7220 * Call this before free_class.
7221 * Locking: runtimeLock must be held by the caller.
7222 **********************************************************************/
7223 static void detach_class(Class cls, bool isMeta)
7224 {
7225 runtimeLock.assertLocked();
7226
7227 // categories not yet attached to this class
7228 objc::unattachedCategories.eraseClass(cls);
7229
7230 // superclass's subclass list
7231 if (cls->isRealized()) {
7232 Class supercls = cls->superclass;
7233 if (supercls) {
7234 removeSubclass(supercls, cls);
7235 } else {
7236 removeRootClass(cls);
7237 }
7238 }
7239
7240 // class tables and +load queue
7241 if (!isMeta) {
7242 removeNamedClass(cls, cls->mangledName());
7243 }
7244 objc::allocatedClasses.get().erase(cls);
7245 }
7246
7247
7248 /***********************************************************************
7249 * free_class
7250 * Frees a class's data structures.
7251 * Call this after detach_class.
7252 * Locking: runtimeLock must be held by the caller
7253 **********************************************************************/
7254 static void free_class(Class cls)
7255 {
7256 runtimeLock.assertLocked();
7257
7258 if (! cls->isRealized()) return;
7259
7260 auto rw = cls->data();
7261 auto ro = rw->ro;
7262
7263 cache_delete(cls);
7264
7265 for (auto& meth : rw->methods) {
7266 try_free(meth.types);
7267 }
7268 rw->methods.tryFree();
7269
7270 const ivar_list_t *ivars = ro->ivars;
7271 if (ivars) {
7272 for (auto& ivar : *ivars) {
7273 try_free(ivar.offset);
7274 try_free(ivar.name);
7275 try_free(ivar.type);
7276 }
7277 try_free(ivars);
7278 }
7279
7280 for (auto& prop : rw->properties) {
7281 try_free(prop.name);
7282 try_free(prop.attributes);
7283 }
7284 rw->properties.tryFree();
7285
7286 rw->protocols.tryFree();
7287
7288 try_free(ro->ivarLayout);
7289 try_free(ro->weakIvarLayout);
7290 try_free(ro->name);
7291 try_free(ro);
7292 try_free(rw);
7293 try_free(cls);
7294 }
7295
7296
7297 void objc_disposeClassPair(Class cls)
7298 {
7299 mutex_locker_t lock(runtimeLock);
7300
7301 checkIsKnownClass(cls);
7302
7303 if (!(cls->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)) ||
7304 !(cls->ISA()->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)))
7305 {
7306 // class not allocated with objc_allocateClassPair
7307 // disposing still-unregistered class is OK!
7308 _objc_inform("objc_disposeClassPair: class '%s' was not "
7309 "allocated with objc_allocateClassPair!",
7310 cls->data()->ro->name);
7311 return;
7312 }
7313
7314 if (cls->isMetaClass()) {
7315 _objc_inform("objc_disposeClassPair: class '%s' is a metaclass, "
7316 "not a class!", cls->data()->ro->name);
7317 return;
7318 }
7319
7320 // Shouldn't have any live subclasses.
7321 if (cls->data()->firstSubclass) {
7322 _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
7323 "including '%s'!", cls->data()->ro->name,
7324 cls->data()->firstSubclass->nameForLogging());
7325 }
7326 if (cls->ISA()->data()->firstSubclass) {
7327 _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
7328 "including '%s'!", cls->data()->ro->name,
7329 cls->ISA()->data()->firstSubclass->nameForLogging());
7330 }
7331
7332 // don't remove_class_from_loadable_list()
7333 // - it's not there and we don't have the lock
7334 detach_class(cls->ISA(), YES);
7335 detach_class(cls, NO);
7336 free_class(cls->ISA());
7337 free_class(cls);
7338 }
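
// Usage sketch (illustrative): disposing a class pair. As the checks above
// show, a pair that was allocated but never registered may be disposed, while
// a registered class must have no live instances or subclasses first.
// "DemoDisposable" is a hypothetical name.
#if 0
#include <objc/runtime.h>
#include <objc/NSObject.h>

static void DisposeClassPairDemo(void)
{
    Class cls = objc_allocateClassPair([NSObject class], "DemoDisposable", 0);
    // ... decided not to register it after all ...
    objc_disposeClassPair(cls);
}
#endif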
7339
7340
7341 /***********************************************************************
7342 * objc_constructInstance
7343 * Creates an instance of `cls` at the location pointed to by `bytes`.
7344 * `bytes` must point to at least class_getInstanceSize(cls) bytes of
7345 * well-aligned zero-filled memory.
7346 * The new object's isa is set. Any C++ constructors are called.
7347 * Returns `bytes` if successful. Returns nil if `cls` or `bytes` is
7348 * nil, or if C++ constructors fail.
7349 * Note: class_createInstance() and class_createInstances() preflight this.
7350 **********************************************************************/
7351 id
7352 objc_constructInstance(Class cls, void *bytes)
7353 {
7354 if (!cls || !bytes) return nil;
7355
7356 id obj = (id)bytes;
7357
7358 // Read class's info bits all at once for performance
7359 bool hasCxxCtor = cls->hasCxxCtor();
7360 bool hasCxxDtor = cls->hasCxxDtor();
7361 bool fast = cls->canAllocNonpointer();
7362
7363 if (fast) {
7364 obj->initInstanceIsa(cls, hasCxxDtor);
7365 } else {
7366 obj->initIsa(cls);
7367 }
7368
7369 if (hasCxxCtor) {
7370 return object_cxxConstructFromClass(obj, cls, OBJECT_CONSTRUCT_NONE);
7371 } else {
7372 return obj;
7373 }
7374 }
7375
7376
7377 /***********************************************************************
7378 * class_createInstance
7379 * fixme
7380 * Locking: none
7381 *
7382 * Note: this function has been carefully written so that the fastpath
7383 * takes no branch.
7384 **********************************************************************/
7385 static ALWAYS_INLINE id
7386 _class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone,
7387 int construct_flags = OBJECT_CONSTRUCT_NONE,
7388 bool cxxConstruct = true,
7389 size_t *outAllocatedSize = nil)
7390 {
7391 ASSERT(cls->isRealized());
7392
7393 // Read class's info bits all at once for performance
7394 bool hasCxxCtor = cxxConstruct && cls->hasCxxCtor();
7395 bool hasCxxDtor = cls->hasCxxDtor();
7396 bool fast = cls->canAllocNonpointer();
7397 size_t size;
7398
7399 size = cls->instanceSize(extraBytes);
7400 if (outAllocatedSize) *outAllocatedSize = size;
7401
7402 id obj;
7403 if (zone) {
7404 obj = (id)malloc_zone_calloc((malloc_zone_t *)zone, 1, size);
7405 } else {
7406 obj = (id)calloc(1, size);
7407 }
7408 if (slowpath(!obj)) {
7409 if (construct_flags & OBJECT_CONSTRUCT_CALL_BADALLOC) {
7410 return _objc_callBadAllocHandler(cls);
7411 }
7412 return nil;
7413 }
7414
7415 if (!zone && fast) {
7416 obj->initInstanceIsa(cls, hasCxxDtor);
7417 } else {
7418 // Use raw pointer isa on the assumption that they might be
7419 // doing something weird with the zone or RR.
7420 obj->initIsa(cls);
7421 }
7422
7423 if (fastpath(!hasCxxCtor)) {
7424 return obj;
7425 }
7426
7427 construct_flags |= OBJECT_CONSTRUCT_FREE_ONFAILURE;
7428 return object_cxxConstructFromClass(obj, cls, construct_flags);
7429 }
7430
7431 id
7432 class_createInstance(Class cls, size_t extraBytes)
7433 {
7434 if (!cls) return nil;
7435 return _class_createInstanceFromZone(cls, extraBytes, nil);
7436 }
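
// Usage sketch (illustrative): class_createInstance() with extra bytes. The
// spare space past the declared ivars is reachable through
// object_getIndexedIvars(). Assumes manual retain/release.
#if 0
#include <objc/runtime.h>
#include <objc/NSObject.h>
#include <string.h>

static void ExtraBytesDemo(void)
{
    // 16 spare bytes after NSObject's ivars, zero-filled by the calloc above.
    id obj = class_createInstance([NSObject class], 16);
    char *scratch = (char *)object_getIndexedIvars(obj);
    memcpy(scratch, "scratch space", 14);

    object_dispose(obj);   // objc_destructInstance() then free()
}
#endif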
7437
7438 NEVER_INLINE
7439 id
7440 _objc_rootAllocWithZone(Class cls, malloc_zone_t *zone __unused)
7441 {
7442 // allocWithZone under __OBJC2__ ignores the zone parameter
7443 return _class_createInstanceFromZone(cls, 0, nil,
7444 OBJECT_CONSTRUCT_CALL_BADALLOC);
7445 }
7446
7447 /***********************************************************************
7448 * class_createInstances
7449 * fixme
7450 * Locking: none
7451 **********************************************************************/
7452 #if SUPPORT_NONPOINTER_ISA
7453 #warning fixme optimize class_createInstances
7454 #endif
7455 unsigned
7456 class_createInstances(Class cls, size_t extraBytes,
7457 id *results, unsigned num_requested)
7458 {
7459 return _class_createInstancesFromZone(cls, extraBytes, nil,
7460 results, num_requested);
7461 }
7462
7463 /***********************************************************************
7464 * object_copyFromZone
7465 * fixme
7466 * Locking: none
7467 **********************************************************************/
7468 static id
7469 _object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
7470 {
7471 if (!oldObj) return nil;
7472 if (oldObj->isTaggedPointer()) return oldObj;
7473
7474 // fixme this doesn't handle C++ ivars correctly (#4619414)
7475
7476 Class cls = oldObj->ISA();
7477 size_t size;
7478 id obj = _class_createInstanceFromZone(cls, extraBytes, zone,
7479 OBJECT_CONSTRUCT_NONE, false, &size);
7480 if (!obj) return nil;
7481
7482 // Copy everything except the isa, which was already set above.
7483 uint8_t *copyDst = (uint8_t *)obj + sizeof(Class);
7484 uint8_t *copySrc = (uint8_t *)oldObj + sizeof(Class);
7485 size_t copySize = size - sizeof(Class);
7486 memmove(copyDst, copySrc, copySize);
7487
7488 fixupCopiedIvars(obj, oldObj);
7489
7490 return obj;
7491 }
7492
7493
7494 /***********************************************************************
7495 * object_copy
7496 * fixme
7497 * Locking: none
7498 **********************************************************************/
7499 id
7500 object_copy(id oldObj, size_t extraBytes)
7501 {
7502 return _object_copyFromZone(oldObj, extraBytes, malloc_default_zone());
7503 }
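
// Usage sketch (illustrative): object_copy() performs a bitwise copy of the
// ivars (isa excluded) plus the requested extra bytes; -copyWithZone: and C++
// copy constructors are never invoked (see the fixme above), and
// fixupCopiedIvars() patches up the copied ivar bookkeeping. Assumes manual
// retain/release.
#if 0
#include <objc/runtime.h>
#include <objc/NSObject.h>
#include <assert.h>

static void ObjectCopyDemo(id original)
{
    id clone = object_copy(original, 0);
    assert(object_getClass(clone) == object_getClass(original));
    [clone release];
}
#endif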
7504
7505
7506 #if SUPPORT_ZONES
7507
7508 /***********************************************************************
7509 * class_createInstanceFromZone
7510 * fixme
7511 * Locking: none
7512 **********************************************************************/
7513 id
7514 class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
7515 {
7516 if (!cls) return nil;
7517 return _class_createInstanceFromZone(cls, extraBytes, zone);
7518 }
7519
7520 /***********************************************************************
7521 * object_copyFromZone
7522 * fixme
7523 * Locking: none
7524 **********************************************************************/
7525 id
7526 object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
7527 {
7528 return _object_copyFromZone(oldObj, extraBytes, zone);
7529 }
7530
7531 #endif
7532
7533
7534 /***********************************************************************
7535 * objc_destructInstance
7536 * Destroys an instance without freeing memory.
7537 * Calls C++ destructors.
7538 * Calls ARC ivar cleanup.
7539 * Removes associative references.
7540 * Returns `obj`. Does nothing if `obj` is nil.
7541 **********************************************************************/
7542 void *objc_destructInstance(id obj)
7543 {
7544 if (obj) {
7545 // Read all of the flags at once for performance.
7546 bool cxx = obj->hasCxxDtor();
7547 bool assoc = obj->hasAssociatedObjects();
7548
7549 // This order is important.
7550 if (cxx) object_cxxDestruct(obj);
7551 if (assoc) _object_remove_assocations(obj);
7552 obj->clearDeallocating();
7553 }
7554
7555 return obj;
7556 }
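
// Usage sketch (illustrative): objc_constructInstance() and
// objc_destructInstance() manage an object's lifetime inside caller-owned
// memory. The buffer must be zero-filled, well-aligned, and at least
// class_getInstanceSize(cls) bytes, per the comments above. Assumes manual
// retain/release.
#if 0
#include <objc/runtime.h>
#include <objc/NSObject.h>
#include <stdlib.h>

static void ConstructDestructDemo(void)
{
    Class cls = [NSObject class];

    void *bytes = calloc(1, class_getInstanceSize(cls));
    id obj = objc_constructInstance(cls, bytes);

    // ... use obj without ever transferring ownership to -release/-dealloc ...

    // Tear down in place (C++ dtors, associated objects, weak references),
    // then release the memory ourselves; objc_destructInstance() never frees.
    objc_destructInstance(obj);
    free(bytes);
}
#endif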
7557
7558
7559 /***********************************************************************
7560 * object_dispose
7561 * fixme
7562 * Locking: none
7563 **********************************************************************/
7564 id
7565 object_dispose(id obj)
7566 {
7567 if (!obj) return nil;
7568
7569 objc_destructInstance(obj);
7570 free(obj);
7571
7572 return nil;
7573 }
7574
7575
7576 /***********************************************************************
7577 * _objc_getFreedObjectClass
7578 * fixme
7579 * Locking: none
7580 **********************************************************************/
7581 Class _objc_getFreedObjectClass (void)
7582 {
7583 return nil;
7584 }
7585
7586
7587
7588 /***********************************************************************
7589 * Tagged pointer objects.
7590 *
7591 * Tagged pointer objects store the class and the object value in the
7592 * object pointer; the "pointer" does not actually point to anything.
7593 *
7594 * Tagged pointer objects currently use this representation:
7595 * (LSB)
7596 * 1 bit set if tagged, clear if ordinary object pointer
7597 * 3 bits tag index
7598 * 60 bits payload
7599 * (MSB)
7600 * The tag index defines the object's class.
7601 * The payload format is defined by the object's class.
7602 *
7603 * If the tag index is 0b111, the tagged pointer object uses an
7604 * "extended" representation, allowing more classes but with smaller payloads:
7605 * (LSB)
7606 * 1 bit set if tagged, clear if ordinary object pointer
7607 * 3 bits 0b111
7608 * 8 bits extended tag index
7609 * 52 bits payload
7610 * (MSB)
7611 *
7612 * Some architectures reverse the MSB and LSB in these representations.
7613 *
7614 * This representation is subject to change. Representation-agnostic SPI is:
7615 * objc-internal.h for class implementers.
7616 * objc-gdb.h for debuggers.
7617 **********************************************************************/
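
// A minimal sketch (illustrative) using the representation-agnostic SPI that
// the comment above points to in objc-internal.h; these helpers hide both the
// bit layout and the obfuscator. OBJC_TAG_NSAtom is used only as an arbitrary
// basic tag here, and the resulting pointer is never messaged.
#if 0
#include "objc-internal.h"
#include <assert.h>

static void TaggedPointerSPIDemo(void)
{
    void *tp = _objc_makeTaggedPointer(OBJC_TAG_NSAtom, 0x1234);
    assert(_objc_isTaggedPointer(tp));
    assert(_objc_getTaggedPointerTag(tp) == OBJC_TAG_NSAtom);
    assert(_objc_getTaggedPointerValue(tp) == 0x1234);
}
#endif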
7618 #if !SUPPORT_TAGGED_POINTERS
7619
7620 // These variables are always provided for debuggers.
7621 uintptr_t objc_debug_taggedpointer_obfuscator = 0;
7622 uintptr_t objc_debug_taggedpointer_mask = 0;
7623 unsigned objc_debug_taggedpointer_slot_shift = 0;
7624 uintptr_t objc_debug_taggedpointer_slot_mask = 0;
7625 unsigned objc_debug_taggedpointer_payload_lshift = 0;
7626 unsigned objc_debug_taggedpointer_payload_rshift = 0;
7627 Class objc_debug_taggedpointer_classes[1] = { nil };
7628
7629 uintptr_t objc_debug_taggedpointer_ext_mask = 0;
7630 unsigned objc_debug_taggedpointer_ext_slot_shift = 0;
7631 uintptr_t objc_debug_taggedpointer_ext_slot_mask = 0;
7632 unsigned objc_debug_taggedpointer_ext_payload_lshift = 0;
7633 unsigned objc_debug_taggedpointer_ext_payload_rshift = 0;
7634 Class objc_debug_taggedpointer_ext_classes[1] = { nil };
7635
7636 static void
7637 disableTaggedPointers() { }
7638
7639 static void
7640 initializeTaggedPointerObfuscator(void) { }
7641
7642 #else
7643
7644 // The "slot" used in the class table and given to the debugger
7645 // includes the is-tagged bit. This makes objc_msgSend faster.
7646 // The "ext" representation doesn't do that.
7647
7648 uintptr_t objc_debug_taggedpointer_obfuscator;
7649 uintptr_t objc_debug_taggedpointer_mask = _OBJC_TAG_MASK;
7650 unsigned objc_debug_taggedpointer_slot_shift = _OBJC_TAG_SLOT_SHIFT;
7651 uintptr_t objc_debug_taggedpointer_slot_mask = _OBJC_TAG_SLOT_MASK;
7652 unsigned objc_debug_taggedpointer_payload_lshift = _OBJC_TAG_PAYLOAD_LSHIFT;
7653 unsigned objc_debug_taggedpointer_payload_rshift = _OBJC_TAG_PAYLOAD_RSHIFT;
7654 // objc_debug_taggedpointer_classes is defined in objc-msg-*.s
7655
7656 uintptr_t objc_debug_taggedpointer_ext_mask = _OBJC_TAG_EXT_MASK;
7657 unsigned objc_debug_taggedpointer_ext_slot_shift = _OBJC_TAG_EXT_SLOT_SHIFT;
7658 uintptr_t objc_debug_taggedpointer_ext_slot_mask = _OBJC_TAG_EXT_SLOT_MASK;
7659 unsigned objc_debug_taggedpointer_ext_payload_lshift = _OBJC_TAG_EXT_PAYLOAD_LSHIFT;
7660 unsigned objc_debug_taggedpointer_ext_payload_rshift = _OBJC_TAG_EXT_PAYLOAD_RSHIFT;
7661 // objc_debug_taggedpointer_ext_classes is defined in objc-msg-*.s
7662
7663 static void
7664 disableTaggedPointers()
7665 {
7666 objc_debug_taggedpointer_mask = 0;
7667 objc_debug_taggedpointer_slot_shift = 0;
7668 objc_debug_taggedpointer_slot_mask = 0;
7669 objc_debug_taggedpointer_payload_lshift = 0;
7670 objc_debug_taggedpointer_payload_rshift = 0;
7671
7672 objc_debug_taggedpointer_ext_mask = 0;
7673 objc_debug_taggedpointer_ext_slot_shift = 0;
7674 objc_debug_taggedpointer_ext_slot_mask = 0;
7675 objc_debug_taggedpointer_ext_payload_lshift = 0;
7676 objc_debug_taggedpointer_ext_payload_rshift = 0;
7677 }
7678
7679
7680 // Returns a pointer to the class's storage in the tagged class arrays.
7681 // Assumes the tag is a valid basic tag.
7682 static Class *
7683 classSlotForBasicTagIndex(objc_tag_index_t tag)
7684 {
7685 uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
7686 >> _OBJC_TAG_INDEX_SHIFT)
7687 & _OBJC_TAG_INDEX_MASK);
7688 uintptr_t obfuscatedTag = tag ^ tagObfuscator;
7689 // Array index in objc_tag_classes includes the tagged bit itself
7690 #if SUPPORT_MSB_TAGGED_POINTERS
7691 return &objc_tag_classes[0x8 | obfuscatedTag];
7692 #else
7693 return &objc_tag_classes[(obfuscatedTag << 1) | 1];
7694 #endif
7695 }
7696
7697
7698 // Returns a pointer to the class's storage in the tagged class arrays,
7699 // or nil if the tag is out of range.
7700 static Class *
7701 classSlotForTagIndex(objc_tag_index_t tag)
7702 {
7703 if (tag >= OBJC_TAG_First60BitPayload && tag <= OBJC_TAG_Last60BitPayload) {
7704 return classSlotForBasicTagIndex(tag);
7705 }
7706
7707 if (tag >= OBJC_TAG_First52BitPayload && tag <= OBJC_TAG_Last52BitPayload) {
7708 int index = tag - OBJC_TAG_First52BitPayload;
7709 uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
7710 >> _OBJC_TAG_EXT_INDEX_SHIFT)
7711 & _OBJC_TAG_EXT_INDEX_MASK);
7712 return &objc_tag_ext_classes[index ^ tagObfuscator];
7713 }
7714
7715 return nil;
7716 }
7717
7718 /***********************************************************************
7719 * initializeTaggedPointerObfuscator
7720 * Initialize objc_debug_taggedpointer_obfuscator with randomness.
7721 *
7722 * The tagged pointer obfuscator is intended to make it more difficult
7723 * for an attacker to construct a particular object as a tagged pointer,
7724 * in the presence of a buffer overflow or other write control over some
7725 * memory. The obfuscator is XORed with the tagged pointers when setting
7726 * or retrieving payload values. The obfuscator is filled with randomness
7727 * on first use.
7728 **********************************************************************/
7729 static void
7730 initializeTaggedPointerObfuscator(void)
7731 {
7732 if (sdkIsOlderThan(10_14, 12_0, 12_0, 5_0, 3_0) ||
7733 // Set the obfuscator to zero for apps linked against older SDKs,
7734 // in case they're relying on the tagged pointer representation.
7735 DisableTaggedPointerObfuscation) {
7736 objc_debug_taggedpointer_obfuscator = 0;
7737 } else {
7738 // Fill the variable with random data, then clear the tag-indicator bit so it is never obfuscated.
7739 arc4random_buf(&objc_debug_taggedpointer_obfuscator,
7740 sizeof(objc_debug_taggedpointer_obfuscator));
7741 objc_debug_taggedpointer_obfuscator &= ~_OBJC_TAG_MASK;
7742 }
7743 }
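
// Sketch (illustrative) of how the obfuscator set up above is applied:
// encoding and decoding are the same XOR, and the tag-indicator bit is never
// obfuscated because it was cleared from the obfuscator above. This mirrors
// the _objc_encodeTaggedPointer / _objc_decodeTaggedPointer helpers in
// objc-internal.h; the Demo* names are hypothetical.
#if 0
#include <stdint.h>
// objc_debug_taggedpointer_obfuscator is defined earlier in this file.

static inline void *DemoEncodeTaggedPointer(uintptr_t value)
{
    return (void *)(objc_debug_taggedpointer_obfuscator ^ value);
}

static inline uintptr_t DemoDecodeTaggedPointer(const void *ptr)
{
    return (uintptr_t)ptr ^ objc_debug_taggedpointer_obfuscator;
}
#endif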
7744
7745
7746 /***********************************************************************
7747 * _objc_registerTaggedPointerClass
7748 * Set the class to use for the given tagged pointer index.
7749 * Aborts if the tag is out of range, or if the tag is already
7750 * used by some other class.
7751 **********************************************************************/
7752 void
7753 _objc_registerTaggedPointerClass(objc_tag_index_t tag, Class cls)
7754 {
7755 if (objc_debug_taggedpointer_mask == 0) {
7756 _objc_fatal("tagged pointers are disabled");
7757 }
7758
7759 Class *slot = classSlotForTagIndex(tag);
7760 if (!slot) {
7761 _objc_fatal("tag index %u is invalid", (unsigned int)tag);
7762 }
7763
7764 Class oldCls = *slot;
7765
7766 if (cls && oldCls && cls != oldCls) {
7767 _objc_fatal("tag index %u used for two different classes "
7768 "(was %p %s, now %p %s)", tag,
7769 oldCls, oldCls->nameForLogging(),
7770 cls, cls->nameForLogging());
7771 }
7772
7773 *slot = cls;
7774
7775 // Store a placeholder class in the basic tag slot that is
7776 // reserved for the extended tag space, if it isn't set already.
7777 // Do this lazily when the first extended tag is registered so
7778 // that old debuggers characterize bogus pointers correctly more often.
7779 if (tag < OBJC_TAG_First60BitPayload || tag > OBJC_TAG_Last60BitPayload) {
7780 Class *extSlot = classSlotForBasicTagIndex(OBJC_TAG_RESERVED_7);
7781 if (*extSlot == nil) {
7782 extern objc_class OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
7783 *extSlot = (Class)&OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
7784 }
7785 }
7786 }
7787
7788
7789 /***********************************************************************
7790 * _objc_getClassForTag
7791 * Returns the class that is using the given tagged pointer tag.
7792 * Returns nil if no class is using that tag or the tag is out of range.
7793 **********************************************************************/
7794 Class
7795 _objc_getClassForTag(objc_tag_index_t tag)
7796 {
7797 Class *slot = classSlotForTagIndex(tag);
7798 if (slot) return *slot;
7799 else return nil;
7800 }
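
// Usage sketch (illustrative): claiming a tag with the SPI above and reading
// it back. DemoTaggedValue is hypothetical, and the sketch assumes
// OBJC_TAG_First52BitPayload has not already been claimed by the system;
// otherwise _objc_registerTaggedPointerClass() aborts as shown above.
#if 0
#include "objc-internal.h"
#include <objc/runtime.h>
#include <objc/NSObject.h>
#include <assert.h>

// A real payload class would override retain/release & friends to behave as
// an immortal value object; this stub only demonstrates registration.
@interface DemoTaggedValue : NSObject @end
@implementation DemoTaggedValue @end

static void RegisterTaggedClassDemo(void)
{
    _objc_registerTaggedPointerClass(OBJC_TAG_First52BitPayload,
                                     [DemoTaggedValue class]);
    assert(_objc_getClassForTag(OBJC_TAG_First52BitPayload) == [DemoTaggedValue class]);

    // Instances are just bit patterns; no heap allocation takes place.
    id value = (id)_objc_makeTaggedPointer(OBJC_TAG_First52BitPayload, 7);
    assert(object_getClass(value) == [DemoTaggedValue class]);
}
#endif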
7801
7802 #endif
7803
7804
7805 #if SUPPORT_FIXUP
7806
7807 OBJC_EXTERN void objc_msgSend_fixup(void);
7808 OBJC_EXTERN void objc_msgSendSuper2_fixup(void);
7809 OBJC_EXTERN void objc_msgSend_stret_fixup(void);
7810 OBJC_EXTERN void objc_msgSendSuper2_stret_fixup(void);
7811 #if defined(__i386__) || defined(__x86_64__)
7812 OBJC_EXTERN void objc_msgSend_fpret_fixup(void);
7813 #endif
7814 #if defined(__x86_64__)
7815 OBJC_EXTERN void objc_msgSend_fp2ret_fixup(void);
7816 #endif
7817
7818 OBJC_EXTERN void objc_msgSend_fixedup(void);
7819 OBJC_EXTERN void objc_msgSendSuper2_fixedup(void);
7820 OBJC_EXTERN void objc_msgSend_stret_fixedup(void);
7821 OBJC_EXTERN void objc_msgSendSuper2_stret_fixedup(void);
7822 #if defined(__i386__) || defined(__x86_64__)
7823 OBJC_EXTERN void objc_msgSend_fpret_fixedup(void);
7824 #endif
7825 #if defined(__x86_64__)
7826 OBJC_EXTERN void objc_msgSend_fp2ret_fixedup(void);
7827 #endif
7828
7829 /***********************************************************************
7830 * fixupMessageRef
7831 * Repairs an old vtable dispatch call site.
7832 * vtable dispatch itself is not supported.
7833 **********************************************************************/
7834 static void
7835 fixupMessageRef(message_ref_t *msg)
7836 {
7837 msg->sel = sel_registerName((const char *)msg->sel);
7838
7839 if (msg->imp == &objc_msgSend_fixup) {
7840 if (msg->sel == @selector(alloc)) {
7841 msg->imp = (IMP)&objc_alloc;
7842 } else if (msg->sel == @selector(allocWithZone:)) {
7843 msg->imp = (IMP)&objc_allocWithZone;
7844 } else if (msg->sel == @selector(retain)) {
7845 msg->imp = (IMP)&objc_retain;
7846 } else if (msg->sel == @selector(release)) {
7847 msg->imp = (IMP)&objc_release;
7848 } else if (msg->sel == @selector(autorelease)) {
7849 msg->imp = (IMP)&objc_autorelease;
7850 } else {
7851 msg->imp = &objc_msgSend_fixedup;
7852 }
7853 }
7854 else if (msg->imp == &objc_msgSendSuper2_fixup) {
7855 msg->imp = &objc_msgSendSuper2_fixedup;
7856 }
7857 else if (msg->imp == &objc_msgSend_stret_fixup) {
7858 msg->imp = &objc_msgSend_stret_fixedup;
7859 }
7860 else if (msg->imp == &objc_msgSendSuper2_stret_fixup) {
7861 msg->imp = &objc_msgSendSuper2_stret_fixedup;
7862 }
7863 #if defined(__i386__) || defined(__x86_64__)
7864 else if (msg->imp == &objc_msgSend_fpret_fixup) {
7865 msg->imp = &objc_msgSend_fpret_fixedup;
7866 }
7867 #endif
7868 #if defined(__x86_64__)
7869 else if (msg->imp == &objc_msgSend_fp2ret_fixup) {
7870 msg->imp = &objc_msgSend_fp2ret_fixedup;
7871 }
7872 #endif
7873 }
7874
7875 // SUPPORT_FIXUP
7876 #endif
7877
7878
7879 // ProKit SPI
7880 static Class setSuperclass(Class cls, Class newSuper)
7881 {
7882 Class oldSuper;
7883
7884 runtimeLock.assertLocked();
7885
7886 ASSERT(cls->isRealized());
7887 ASSERT(newSuper->isRealized());
7888
7889 oldSuper = cls->superclass;
7890 removeSubclass(oldSuper, cls);
7891 removeSubclass(oldSuper->ISA(), cls->ISA());
7892
7893 cls->superclass = newSuper;
7894 cls->ISA()->superclass = newSuper->ISA();
7895 addSubclass(newSuper, cls);
7896 addSubclass(newSuper->ISA(), cls->ISA());
7897
7898 // Flush subclass's method caches.
7899 flushCaches(cls);
7900 flushCaches(cls->ISA());
7901
7902 return oldSuper;
7903 }
7904
7905
7906 Class class_setSuperclass(Class cls, Class newSuper)
7907 {
7908 mutex_locker_t lock(runtimeLock);
7909 return setSuperclass(cls, newSuper);
7910 }
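
// Usage sketch (illustrative): class_setSuperclass() rewires both the class
// and its metaclass and flushes their method caches, as setSuperclass() above
// shows. It is documented as something you should generally not call; the
// Demo* class names are hypothetical.
#if 0
#include <objc/runtime.h>
#include <objc/NSObject.h>
#include <assert.h>

static void SetSuperclassDemo(void)
{
    Class parentA = objc_allocateClassPair([NSObject class], "DemoParentA", 0);
    Class parentB = objc_allocateClassPair([NSObject class], "DemoParentB", 0);
    Class child   = objc_allocateClassPair(parentA, "DemoChild", 0);
    objc_registerClassPair(parentA);
    objc_registerClassPair(parentB);
    objc_registerClassPair(child);

    Class old = class_setSuperclass(child, parentB);
    assert(old == parentA);
    assert(class_getSuperclass(child) == parentB);
    assert(class_getSuperclass(object_getClass((id)child))
           == object_getClass((id)parentB));
}
#endif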
7911
7912 void runtime_init(void)
7913 {
7914 objc::unattachedCategories.init(32);
7915 objc::allocatedClasses.init();
7916 }
7917
7918 // __OBJC2__
7919 #endif