/*
 * Copyright (c) 2005-2009 Apple Inc. All Rights Reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
/***********************************************************************
* Support for new-ABI classes and images.
**********************************************************************/
#include "DenseMapExtras.h"
#include "objc-private.h"
#include "objc-runtime-new.h"
#include "objc-file.h"
#include "objc-zalloc.h"

#include <objc/message.h>
#include <mach/shared_region.h>
#define newprotocol(p) ((protocol_t *)p)

static void disableTaggedPointers();
static void detach_class(Class cls, bool isMeta);
static void free_class(Class cls);
static IMP addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace);
static void adjustCustomFlagsForMethodChange(Class cls, method_t *meth);
static method_t *search_method_list(const method_list_t *mlist, SEL sel);
template<typename T> static bool method_lists_contains_any(T *mlists, T *end,
        SEL sels[], size_t selcount);
static void flushCaches(Class cls, const char *func, bool (^predicate)(Class c));
static void initializeTaggedPointerObfuscator(void);
static void fixupMessageRef(message_ref_t *msg);
static Class realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock);
static Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized);
struct locstamped_category_t {
    category_t *cat;
    struct header_info *hi;
};
enum {
    ATTACH_CLASS               = 1 << 0,
    ATTACH_METACLASS           = 1 << 1,
    ATTACH_CLASS_AND_METACLASS = 1 << 2,
    ATTACH_EXISTING            = 1 << 3,
};
static void attachCategories(Class cls, const struct locstamped_category_t *cats_list, uint32_t cats_count, int flags);
/***********************************************************************
* Lock management
**********************************************************************/
mutex_t runtimeLock;
mutex_t selLock;
#if CONFIG_USE_CACHE_LOCK
mutex_t cacheUpdateLock;
#endif
recursive_mutex_t loadMethodLock;
/***********************************************************************
* Class structure decoding
**********************************************************************/

const uintptr_t objc_debug_class_rw_data_mask = FAST_DATA_MASK;
/***********************************************************************
* Non-pointer isa decoding
**********************************************************************/
#if SUPPORT_INDEXED_ISA

// Indexed non-pointer isa.

// These are used to mask the ISA and see if it has an index or not.
const uintptr_t objc_debug_indexed_isa_magic_mask  = ISA_INDEX_MAGIC_MASK;
const uintptr_t objc_debug_indexed_isa_magic_value = ISA_INDEX_MAGIC_VALUE;

// die if masks overlap
STATIC_ASSERT((ISA_INDEX_MASK & ISA_INDEX_MAGIC_MASK) == 0);

// die if magic is wrong
STATIC_ASSERT((~ISA_INDEX_MAGIC_MASK & ISA_INDEX_MAGIC_VALUE) == 0);

// Then these are used to extract the index from the ISA.
const uintptr_t objc_debug_indexed_isa_index_mask  = ISA_INDEX_MASK;
const uintptr_t objc_debug_indexed_isa_index_shift = ISA_INDEX_SHIFT;

asm("\n .globl _objc_absolute_indexed_isa_magic_mask"                        \
    "\n _objc_absolute_indexed_isa_magic_mask = " STRINGIFY2(ISA_INDEX_MAGIC_MASK));
asm("\n .globl _objc_absolute_indexed_isa_magic_value"                       \
    "\n _objc_absolute_indexed_isa_magic_value = " STRINGIFY2(ISA_INDEX_MAGIC_VALUE));
asm("\n .globl _objc_absolute_indexed_isa_index_mask"                        \
    "\n _objc_absolute_indexed_isa_index_mask = " STRINGIFY2(ISA_INDEX_MASK));
asm("\n .globl _objc_absolute_indexed_isa_index_shift"                       \
    "\n _objc_absolute_indexed_isa_index_shift = " STRINGIFY2(ISA_INDEX_SHIFT));

// And then we can use that index to get the class from this array. Note
// the size is provided so that clients can ensure the index they get is in
// bounds and not read off the end of the array.
// Defined in the objc-msg-*.s files
// const Class objc_indexed_classes[]

// When we don't have enough bits to store a class*, we can instead store an
// index into this array. Classes are added here when they are realized.
// Note, an index of 0 is illegal.
uintptr_t objc_indexed_classes_count = 0;

// SUPPORT_INDEXED_ISA
#else
// not SUPPORT_INDEXED_ISA

// These variables exist but are all set to 0 so that they are ignored.
const uintptr_t objc_debug_indexed_isa_magic_mask  = 0;
const uintptr_t objc_debug_indexed_isa_magic_value = 0;
const uintptr_t objc_debug_indexed_isa_index_mask  = 0;
const uintptr_t objc_debug_indexed_isa_index_shift = 0;
Class objc_indexed_classes[1] = { nil };
uintptr_t objc_indexed_classes_count = 0;

// not SUPPORT_INDEXED_ISA
#endif
#if SUPPORT_PACKED_ISA

// Packed non-pointer isa.

asm("\n .globl _objc_absolute_packed_isa_class_mask"                         \
    "\n _objc_absolute_packed_isa_class_mask = " STRINGIFY2(ISA_MASK));

// A better definition is
//     (uintptr_t)ptrauth_strip((void *)ISA_MASK, ISA_SIGNING_KEY)
// however we know that PAC uses bits outside of MACH_VM_MAX_ADDRESS,
// so approximate the definition here to be constant.
template <typename T>
static constexpr T coveringMask(T n) {
    for (T mask = 0; mask != ~T{0}; mask = (mask << 1) | 1) {
        if ((n & mask) == n) return mask;
    }
    return ~T{0};
}
const uintptr_t objc_debug_isa_class_mask  = ISA_MASK & coveringMask(MACH_VM_MAX_ADDRESS - 1);
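// Illustrative note (an assumption added for clarity, not from the original
// source): coveringMask(n) returns the smallest all-ones mask of the form
// (1 << k) - 1 that covers n, e.g.
//     coveringMask((uintptr_t)0x0FFF) == 0x0FFF
//     coveringMask((uintptr_t)0x1000) == 0x1FFF
// ANDing ISA_MASK with the mask covering MACH_VM_MAX_ADDRESS - 1 therefore
// drops any class-pointer bits that lie above the addressable range, such as
// PAC signature bits.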

const uintptr_t objc_debug_isa_magic_mask  = ISA_MAGIC_MASK;
const uintptr_t objc_debug_isa_magic_value = ISA_MAGIC_VALUE;

// die if masks overlap
STATIC_ASSERT((ISA_MASK & ISA_MAGIC_MASK) == 0);

// die if magic is wrong
STATIC_ASSERT((~ISA_MAGIC_MASK & ISA_MAGIC_VALUE) == 0);

// die if virtual address space bound goes up
STATIC_ASSERT((~ISA_MASK & MACH_VM_MAX_ADDRESS) == 0  ||
              ISA_MASK + sizeof(void*) == MACH_VM_MAX_ADDRESS);

// SUPPORT_PACKED_ISA
#else
// not SUPPORT_PACKED_ISA

// These variables exist but enforce pointer alignment only.
const uintptr_t objc_debug_isa_class_mask  = (~WORD_MASK);
const uintptr_t objc_debug_isa_magic_mask  = WORD_MASK;
const uintptr_t objc_debug_isa_magic_value = 0;

// not SUPPORT_PACKED_ISA
#endif
/***********************************************************************
* Swift marker bits
**********************************************************************/
const uintptr_t objc_debug_swift_stable_abi_bit = FAST_IS_SWIFT_STABLE;


/***********************************************************************
* allocatedClasses
* A table of all classes (and metaclasses) which have been allocated
* with objc_allocateClassPair.
**********************************************************************/
namespace objc {
static ExplicitInitDenseSet<Class> allocatedClasses;
}
/***********************************************************************
* _firstRealizedClass
* The root of all realized classes
**********************************************************************/
static Class _firstRealizedClass = nil;

/***********************************************************************
* didInitialAttachCategories
* Whether the initial attachment of categories present at startup has
* been completed.
**********************************************************************/
static bool didInitialAttachCategories = false;

/***********************************************************************
* didCallDyldNotifyRegister
* Whether the call to _dyld_objc_notify_register has completed.
**********************************************************************/
bool didCallDyldNotifyRegister = false;
/***********************************************************************
* smallMethodIMPMap
* The map from small method pointers to replacement IMPs.
* Locking: runtimeLock must be held when accessing this map.
**********************************************************************/
namespace objc {
    static objc::LazyInitDenseMap<const method_t *, IMP> smallMethodIMPMap;
}

static IMP method_t_remappedImp_nolock(const method_t *m) {
    runtimeLock.assertLocked();
    auto *map = objc::smallMethodIMPMap.get(false);
    if (!map)
        return nullptr;
    auto iter = map->find(m);
    if (iter == map->end())
        return nullptr;
    return iter->second;
}

IMP method_t::remappedImp(bool needsLock) const {
    if (needsLock) {
        mutex_locker_t guard(runtimeLock);
        return method_t_remappedImp_nolock(this);
    }
    return method_t_remappedImp_nolock(this);
}
void method_t::remapImp(IMP imp) {
    runtimeLock.assertLocked();
    auto *map = objc::smallMethodIMPMap.get(true);
    (*map)[this] = imp;
}

objc_method_description *method_t::getSmallDescription() const {
    static objc::LazyInitDenseMap<const method_t *, objc_method_description *> map;
    mutex_locker_t guard(runtimeLock);
    auto &ptr = (*map.get(true))[this];
    if (!ptr) {
        ptr = (objc_method_description *)malloc(sizeof *ptr);
        ptr->name = name();
        ptr->types = (char *)types();
    }
    return ptr;
}
/*
  The low two bits of mlist->entsize are used as the fixed-up marker.
    Method lists from the shared cache are 1 (uniqued) or 3 (uniqued and sorted).
    (Protocol method lists are not sorted because of their extra parallel data.)
    Runtime fixed-up method lists get 3.

  The high two bits of protocol->flags are used as the fixed-up marker.
  PREOPTIMIZED VERSION:
    Protocols from the shared cache are 1<<30.
    Runtime fixed-up protocols get 1<<30.
  UN-PREOPTIMIZED VERSION:
    Protocols from the shared cache are 1<<30.
    The shared cache's fixups are not trusted.
    Runtime fixed-up protocols get 3<<30.
*/

static const uint32_t fixed_up_method_list = 3;
static const uint32_t uniqued_method_list = 1;
static uint32_t fixed_up_protocol = PROTOCOL_FIXED_UP_1;
static uint32_t canonical_protocol = PROTOCOL_IS_CANONICAL;
void
disableSharedCacheOptimizations(void)
{
    fixed_up_protocol = PROTOCOL_FIXED_UP_1 | PROTOCOL_FIXED_UP_2;
    // It's safe to just set canonical_protocol to 0: we never call
    // clearIsCanonical() unless isCanonical() returned true, which can't
    // happen with a 0 mask.
    canonical_protocol = 0;
}
bool method_list_t::isUniqued() const {
    return (flags() & uniqued_method_list) != 0;
}

bool method_list_t::isFixedUp() const {
    // Ignore any flags in the top bits, just look at the bottom two.
    return (flags() & 0x3) == fixed_up_method_list;
}

void method_list_t::setFixedUp() {
    runtimeLock.assertLocked();
    ASSERT(!isFixedUp());
    entsizeAndFlags = entsize() | fixed_up_method_list;
}

bool protocol_t::isFixedUp() const {
    return (flags & PROTOCOL_FIXED_UP_MASK) == fixed_up_protocol;
}

void protocol_t::setFixedUp() {
    runtimeLock.assertLocked();
    ASSERT(!isFixedUp());
    flags = (flags & ~PROTOCOL_FIXED_UP_MASK) | fixed_up_protocol;
}

bool protocol_t::isCanonical() const {
    return (flags & canonical_protocol) != 0;
}

void protocol_t::clearIsCanonical() {
    runtimeLock.assertLocked();
    ASSERT(isCanonical());
    flags = flags & ~canonical_protocol;
}
const method_list_t_authed_ptr<method_list_t> *method_array_t::endCategoryMethodLists(Class cls) const
{
    auto mlists = beginLists();
    auto mlistsEnd = endLists();

    if (mlists == mlistsEnd  ||  !cls->data()->ro()->baseMethods())
    {
        // No methods, or no base methods.
        // Everything here is a category method.
        return mlistsEnd;
    }

    // Have base methods. Category methods are
    // everything except the last method list.
    return mlistsEnd - 1;
}
static const char *sel_cname(SEL sel)
{
    return (const char *)(void *)sel;
}

static size_t protocol_list_size(const protocol_list_t *plist)
{
    return sizeof(protocol_list_t) + plist->count * sizeof(protocol_t *);
}

static void try_free(const void *p)
{
    if (p && malloc_size(p)) free((void *)p);
}
using ClassCopyFixupHandler = void (*)(Class _Nonnull oldClass,
                                       Class _Nonnull newClass);
// Normally there's only one handler registered.
static GlobalSmallVector<ClassCopyFixupHandler, 1> classCopyFixupHandlers;

void _objc_setClassCopyFixupHandler(void (* _Nonnull newFixupHandler)
    (Class _Nonnull oldClass, Class _Nonnull newClass)) {
    mutex_locker_t lock(runtimeLock);

    classCopyFixupHandlers.append(newFixupHandler);
}
static Class
alloc_class_for_subclass(Class supercls, size_t extraBytes)
{
    if (!supercls  ||  !supercls->isAnySwift()) {
        return _calloc_class(sizeof(objc_class) + extraBytes);
    }

    // Superclass is a Swift class. New subclass must duplicate its extra bits.

    // Allocate the new class, with space for super's prefix and suffix
    // and self's extraBytes.
    swift_class_t *swiftSupercls = (swift_class_t *)supercls;
    size_t superSize = swiftSupercls->classSize;
    void *superBits = swiftSupercls->baseAddress();
    void *bits = malloc(superSize + extraBytes);

    // Copy all of the superclass's data to the new class.
    memcpy(bits, superBits, superSize);

    // Erase the objc data and the Swift description in the new class.
    swift_class_t *swcls = (swift_class_t *)
        ((uint8_t *)bits + swiftSupercls->classAddressOffset);
    bzero(swcls, sizeof(objc_class));
    swcls->description = nil;

    for (auto handler : classCopyFixupHandlers) {
        handler(supercls, (Class)swcls);
    }

    // Mark this class as Swift-enhanced.
    if (supercls->isSwiftStable()) {
        swcls->bits.setIsSwiftStable();
    }
    if (supercls->isSwiftLegacy()) {
        swcls->bits.setIsSwiftLegacy();
    }

    return (Class)swcls;
}
/***********************************************************************
* object_getIndexedIvars.
**********************************************************************/
void *object_getIndexedIvars(id obj)
{
    uint8_t *base = (uint8_t *)obj;

    if (obj->isTaggedPointerOrNil()) return nil;

    if (!obj->isClass()) return base + obj->ISA()->alignedInstanceSize();

    Class cls = (Class)obj;
    if (!cls->isAnySwift()) return base + sizeof(objc_class);

    swift_class_t *swcls = (swift_class_t *)cls;
    return base - swcls->classAddressOffset + word_align(swcls->classSize);
}
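// Usage sketch (illustrative, not from the original source): the "indexed
// ivars" are the extraBytes tacked onto an allocation. For an instance they
// follow the declared ivars; for a class object they follow the class
// structure. For example, with a hypothetical class "MyBox":
//
//     Class cls = objc_allocateClassPair([NSObject class], "MyBox", 0);
//     objc_registerClassPair(cls);
//     id obj = class_createInstance(cls, sizeof(int));  // extra instance bytes
//     int *slot = (int *)object_getIndexedIvars(obj);   // points at those bytes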
/***********************************************************************
* make_ro_writeable
* Reallocates rw->ro if necessary to make it writeable.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static class_ro_t *make_ro_writeable(class_rw_t *rw)
{
    runtimeLock.assertLocked();

    if (rw->flags & RW_COPIED_RO) {
        // already writeable, do nothing
    } else {
        rw->set_ro(rw->ro()->duplicate());
        rw->flags |= RW_COPIED_RO;
    }
    return const_cast<class_ro_t *>(rw->ro());
}
/***********************************************************************
* dataSegmentsContain
* Returns true if the given address lies within a data segment in any
* loaded image.
**********************************************************************/
static bool
dataSegmentsContain(Class cls)
{
    uint32_t index;
    if (objc::dataSegmentsRanges.find((uintptr_t)cls, index)) {
        // if the class is realized (hence has a class_rw_t),
        // memorize where we found the range
        if (cls->isRealized()) {
            cls->data()->witness = (uint16_t)index;
        }
        return true;
    }
    return false;
}
/***********************************************************************
* isKnownClass
* Return true if the class is known to the runtime (located within the
* shared cache, within the data segment of a loaded image, or has been
* allocated with objc_allocateClassPair).
*
* The result of this operation is cached on the class in a "witness"
* value that is cheaply checked in the fastpath.
**********************************************************************/
static bool
isKnownClass(Class cls)
{
    if (fastpath(objc::dataSegmentsRanges.contains(cls->data()->witness, (uintptr_t)cls))) {
        return true;
    }
    auto &set = objc::allocatedClasses.get();
    return set.find(cls) != set.end() || dataSegmentsContain(cls);
}
/***********************************************************************
* addClassTableEntry
* Add a class to the table of all classes. If addMeta is true,
* automatically adds the metaclass of the class as well.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void
addClassTableEntry(Class cls, bool addMeta = true)
{
    runtimeLock.assertLocked();

    // This class is allowed to be a known class via the shared cache or via
    // data segments, but it is not allowed to be in the dynamic table already.
    auto &set = objc::allocatedClasses.get();

    ASSERT(set.find(cls) == set.end());

    if (!isKnownClass(cls))
        set.insert(cls);
    if (addMeta)
        addClassTableEntry(cls->ISA(), false);
}
/***********************************************************************
* checkIsKnownClass
* Checks the given class against the list of all known classes. Dies
* with a fatal error if the class is not known.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void
checkIsKnownClass(Class cls)
{
    if (slowpath(!isKnownClass(cls))) {
        _objc_fatal("Attempt to use unknown class %p.", cls);
    }
}
/***********************************************************************
* classNSObject
* Returns class NSObject.
**********************************************************************/
static Class classNSObject(void)
{
    extern objc_class OBJC_CLASS_$_NSObject;
    return (Class)&OBJC_CLASS_$_NSObject;
}

static Class metaclassNSObject(void)
{
    extern objc_class OBJC_METACLASS_$_NSObject;
    return (Class)&OBJC_METACLASS_$_NSObject;
}
/***********************************************************************
* printReplacements
* Implementation of PrintReplacedMethods / OBJC_PRINT_REPLACED_METHODS.
* Warn about methods from cats that override other methods in cats or cls.
* Assumes no methods from cats have been added to cls yet.
**********************************************************************/
__attribute__((cold, noinline))
static void
printReplacements(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count)
{
    uint32_t c;
    bool isMeta = cls->isMetaClass();

    // Newest categories are LAST in cats
    // Later categories override earlier ones.
    for (c = 0; c < cats_count; c++) {
        category_t *cat = cats_list[c].cat;

        method_list_t *mlist = cat->methodsForMeta(isMeta);
        if (!mlist) continue;

        for (const auto& meth : *mlist) {
            SEL s = sel_registerName(sel_cname(meth.name()));

            // Search for replaced methods in method lookup order.
            // Complain about the first duplicate only.

            // Look for method in earlier categories
            for (uint32_t c2 = 0; c2 < c; c2++) {
                category_t *cat2 = cats_list[c2].cat;

                const method_list_t *mlist2 = cat2->methodsForMeta(isMeta);
                if (!mlist2) continue;

                for (const auto& meth2 : *mlist2) {
                    SEL s2 = sel_registerName(sel_cname(meth2.name()));
                    if (s == s2) {
                        logReplacedMethod(cls->nameForLogging(), s,
                                          cls->isMetaClass(), cat->name,
                                          meth2.imp(false), meth.imp(false));
                        goto complained;
                    }
                }
            }

            // Look for method in cls
            for (const auto& meth2 : cls->data()->methods()) {
                SEL s2 = sel_registerName(sel_cname(meth2.name()));
                if (s == s2) {
                    logReplacedMethod(cls->nameForLogging(), s,
                                      cls->isMetaClass(), cat->name,
                                      meth2.imp(false), meth.imp(false));
                    goto complained;
                }
            }

        complained:
            ;
        }
    }
}
/***********************************************************************
* unreasonableClassCount
* Provides an upper bound for any iteration of classes,
* to prevent spins when runtime metadata is corrupted.
**********************************************************************/
static unsigned unreasonableClassCount()
{
    runtimeLock.assertLocked();

    int base = NXCountMapTable(gdb_objc_realized_classes) +
        getPreoptimizedClassUnreasonableCount();

    // Provide lots of slack here. Some iterations touch metaclasses too.
    // Some iterations backtrack (like realized class iteration).
    // We don't need an efficient bound, merely one that prevents spins.
    return (base + 1) * 16;
}
/***********************************************************************
* Class enumerators
* The passed in block returns `false` if subclasses can be skipped
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static inline void
foreach_realized_class_and_subclass_2(Class top, unsigned &count,
                                      bool skip_metaclass,
                                      bool (^code)(Class) __attribute((noescape)))
{
    Class cls = top;

    runtimeLock.assertLocked();
    ASSERT(top);

    while (1) {
        if (--count == 0) {
            _objc_fatal("Memory corruption in class list.");
        }

        bool skip_subclasses;

        if (skip_metaclass && cls->isMetaClass()) {
            skip_subclasses = true;
        } else {
            skip_subclasses = !code(cls);
        }

        if (!skip_subclasses && cls->data()->firstSubclass) {
            cls = cls->data()->firstSubclass;
        } else {
            while (!cls->data()->nextSiblingClass  &&  cls != top) {
                cls = cls->getSuperclass();
                if (--count == 0) {
                    _objc_fatal("Memory corruption in class list.");
                }
            }
            if (cls == top) break;
            cls = cls->data()->nextSiblingClass;
        }
    }
}
// Enumerates a class and all of its realized subclasses.
static void
foreach_realized_class_and_subclass(Class top, bool (^code)(Class) __attribute((noescape)))
{
    unsigned int count = unreasonableClassCount();

    foreach_realized_class_and_subclass_2(top, count, false, code);
}

// Enumerates all realized classes and metaclasses.
static void
foreach_realized_class_and_metaclass(bool (^code)(Class) __attribute((noescape)))
{
    unsigned int count = unreasonableClassCount();

    for (Class top = _firstRealizedClass;
         top != nil;
         top = top->data()->nextSiblingClass)
    {
        foreach_realized_class_and_subclass_2(top, count, false, code);
    }
}

// Enumerates all realized classes (ignoring metaclasses).
static void
foreach_realized_class(bool (^code)(Class) __attribute((noescape)))
{
    unsigned int count = unreasonableClassCount();

    for (Class top = _firstRealizedClass;
         top != nil;
         top = top->data()->nextSiblingClass)
    {
        foreach_realized_class_and_subclass_2(top, count, true, code);
    }
}
/***********************************************************************
* Method Scanners / Optimization tracking
* Implementation of scanning for various implementations of methods.
**********************************************************************/

namespace objc {

enum SelectorBundle {
    AWZ,
    RR,
    Core,
};

namespace scanner {

// The current state of NSObject swizzling for every scanner
//
// It allows for cheap checks of global swizzles, and also lets
// things like IMP swizzling before NSObject has been initialized
// be remembered, as setInitialized() would miss these.
//
// Every pair of bits describes a SelectorBundle.
// even bits: is the NSObject class swizzled for this bundle
// odd bits:  is the NSObject metaclass swizzled for this bundle
static uintptr_t NSObjectSwizzledMask;
static ALWAYS_INLINE uintptr_t
swizzlingBit(SelectorBundle bundle, bool isMeta)
{
    return 1UL << (2 * bundle + isMeta);
}
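// For example (illustrative, using the enum values above: AWZ == 0, RR == 1,
// Core == 2): the bit for (RR, isMeta == true) is 1UL << 3, and the bit for
// (Core, isMeta == false) is 1UL << 4.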
static void __attribute__((cold, noinline))
printCustom(Class cls, SelectorBundle bundle, bool inherited)
{
    static char const * const SelectorBundleName[] = {
        [AWZ]  = "CUSTOM AWZ",
        [RR]   = "CUSTOM RR",
        [Core] = "CUSTOM Core",
    };

    _objc_inform("%s: %s%s%s", SelectorBundleName[bundle],
                 cls->nameForLogging(),
                 cls->isMetaClass() ? " (meta)" : "",
                 inherited ? " (inherited)" : "");
}
enum class Scope { Instances, Classes, Both };

template <typename Traits, SelectorBundle Bundle, bool &ShouldPrint, Scope Domain = Scope::Both>
struct Mixin {

    // work around compiler being broken with templates using Class/objc_class,
    // probably some weird confusion with Class being builtin
    ALWAYS_INLINE static objc_class *as_objc_class(Class cls) {
        return (objc_class *)cls;
    }

    static void
    setCustomRecursively(Class cls, bool inherited = false)
    {
        foreach_realized_class_and_subclass(cls, [=](Class c){
            if (c != cls && !as_objc_class(c)->isInitialized()) {
                // Subclass not yet initialized. Wait for setInitialized() to do it.
                return false;
            }
            if (Traits::isCustom(c)) {
                // Already custom: its subclasses are already marked too.
                return false;
            }
            Traits::setCustom(c);
            if (ShouldPrint) {
                printCustom(cls, Bundle, inherited || c != cls);
            }
            return true;
        });
    }
    static bool
    isNSObjectSwizzled(bool isMeta)
    {
        return NSObjectSwizzledMask & swizzlingBit(Bundle, isMeta);
    }

    static void
    setNSObjectSwizzled(Class NSOClass, bool isMeta)
    {
        NSObjectSwizzledMask |= swizzlingBit(Bundle, isMeta);
        if (as_objc_class(NSOClass)->isInitialized()) {
            setCustomRecursively(NSOClass);
        }
    }
    static void
    scanChangedMethodForUnknownClass(const method_t *meth)
    {
        Class cls;

        cls = classNSObject();
        if (Domain != Scope::Classes && !isNSObjectSwizzled(NO)) {
            for (const auto &meth2: as_objc_class(cls)->data()->methods()) {
                if (meth == &meth2) {
                    setNSObjectSwizzled(cls, NO);
                    break;
                }
            }
        }

        cls = metaclassNSObject();
        if (Domain != Scope::Instances && !isNSObjectSwizzled(YES)) {
            for (const auto &meth2: as_objc_class(cls)->data()->methods()) {
                if (meth == &meth2) {
                    setNSObjectSwizzled(cls, YES);
                    break;
                }
            }
        }
    }
    static void
    scanAddedClassImpl(Class cls, bool isMeta)
    {
        bool setCustom = NO, inherited = NO;

        if (isNSObjectSwizzled(isMeta)) {
            setCustom = YES;
        } else if (Traits::knownClassHasDefaultImpl(cls, isMeta)) {
            // This class is known to have the default implementations,
            // but we need to check categories.
            auto &methods = as_objc_class(cls)->data()->methods();
            setCustom = Traits::scanMethodLists(methods.beginCategoryMethodLists(),
                                                methods.endCategoryMethodLists(cls));
        } else if (!isMeta && !as_objc_class(cls)->getSuperclass()) {
            // Custom root class
            setCustom = YES;
        } else if (Traits::isCustom(as_objc_class(cls)->getSuperclass())) {
            // Superclass is custom, therefore we are too.
            setCustom = YES;
            inherited = YES;
        } else {
            // Not NSObject: scan all of this class's method lists.
            auto &methods = as_objc_class(cls)->data()->methods();
            setCustom = Traits::scanMethodLists(methods.beginLists(),
                                                methods.endLists());
        }
        if (slowpath(setCustom)) {
            if (ShouldPrint) printCustom(cls, Bundle, inherited);
        } else {
            Traits::setDefault(cls);
        }
    }
    static bool knownClassHasDefaultImpl(Class cls, bool isMeta) {
        // Typically only NSObject has default implementations.
        // Allow this to be extended by overriding (to allow
        // SwiftObject, for example).
        Class NSOClass = (isMeta ? metaclassNSObject() : classNSObject());
        return cls == NSOClass;
    }
    // Scan a class that is about to be marked Initialized for particular
    // bundles of selectors, and mark the class and its children accordingly.
    //
    // This also handles inheriting properties from its superclass.
    //
    // Caller: objc_class::setInitialized()
    static void
    scanInitializedClass(Class cls, Class metacls)
    {
        if (Domain != Scope::Classes) {
            scanAddedClassImpl(cls, false);
        }
        if (Domain != Scope::Instances) {
            scanAddedClassImpl(metacls, true);
        }
    }
    // Inherit various properties from the superclass when a class
    // is being added to the graph.
    //
    // Caller: addSubclass()
    static void
    scanAddedSubClass(Class subcls, Class supercls)
    {
        if (slowpath(Traits::isCustom(supercls) && !Traits::isCustom(subcls))) {
            setCustomRecursively(subcls, true);
        }
    }
    // Scan method lists for selectors that would override things
    // in a Bundle.
    //
    // This is used to detect when categories that override problematic
    // selectors are injected into a class after it has been initialized.
    //
    // Caller: prepareMethodLists()
    static void
    scanAddedMethodLists(Class cls, method_list_t **mlists, int count)
    {
        if (slowpath(Traits::isCustom(cls))) {
            // already custom, nothing would change
            return;
        }
        if (slowpath(Traits::scanMethodLists(mlists, mlists + count))) {
            setCustomRecursively(cls);
        }
    }
    // Handle IMP swizzling (the IMP for an existing method being changed).
    //
    // In almost all cases, IMP swizzling does not affect custom bits.
    // Custom search will already find the method whether or not
    // it is swizzled, so it does not transition from non-custom to custom.
    //
    // The only cases where IMP swizzling can affect the custom bits are
    // when the swizzled method is one of the methods that is assumed to be
    // non-custom. These special cases are listed in setInitialized().
    // We look for such cases here.
    //
    // Caller: Swizzling methods via adjustCustomFlagsForMethodChange()
    static void
    scanChangedMethod(Class cls, const method_t *meth)
    {
        if (fastpath(!Traits::isInterestingSelector(meth->name()))) {
            return;
        }

        if (cls) {
            bool isMeta = as_objc_class(cls)->isMetaClass();
            if (isMeta && Domain != Scope::Instances) {
                if (cls == metaclassNSObject() && !isNSObjectSwizzled(isMeta)) {
                    setNSObjectSwizzled(cls, isMeta);
                }
            }
            if (!isMeta && Domain != Scope::Classes) {
                if (cls == classNSObject() && !isNSObjectSwizzled(isMeta)) {
                    setNSObjectSwizzled(cls, isMeta);
                }
            }
        } else {
            // We're called from method_exchangeImplementations, so only the
            // NSObject class and metaclass may be problematic (exchanging
            // the default builtin IMP of an interesting selector is a
            // swizzle that may flip our scanned property; for other
            // classes, the previous value had already flipped the property).
            //
            // However, as we don't know the class, we need to scan all of
            // the NSObject class and metaclass methods (this is SLOW).
            scanChangedMethodForUnknownClass(meth);
        }
    }
};

} // namespace scanner
// AWZ methods: +alloc / +allocWithZone:
struct AWZScanner : scanner::Mixin<AWZScanner, AWZ, PrintCustomAWZ, scanner::Scope::Classes> {
    static bool isCustom(Class cls) {
        return cls->hasCustomAWZ();
    }
    static void setCustom(Class cls) {
        cls->setHasCustomAWZ();
    }
    static void setDefault(Class cls) {
        cls->setHasDefaultAWZ();
    }
    static bool isInterestingSelector(SEL sel) {
        return sel == @selector(alloc) || sel == @selector(allocWithZone:);
    }
    template<typename T>
    static bool scanMethodLists(T *mlists, T *end) {
        SEL sels[2] = { @selector(alloc), @selector(allocWithZone:), };
        return method_lists_contains_any(mlists, end, sels, 2);
    }
};
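// Illustrative note (an assumption added for clarity, not from the original
// source): a category on a hypothetical class, such as
//
//     @implementation MyClass (Tracking)
//     + (instancetype)alloc { ... }
//     @end
//
// makes scanMethodLists find +alloc, so MyClass and its subclasses get marked
// as custom AWZ and the runtime's objc_alloc() fast path no longer applies
// to them.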
// Retain/Release methods that are extremely rarely overridden
//
// retain/release/autorelease/retainCount/
// _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
struct RRScanner : scanner::Mixin<RRScanner, RR, PrintCustomRR
#if !SUPPORT_NONPOINTER_ISA
, scanner::Scope::Instances
#endif
> {
    static bool isCustom(Class cls) {
        return cls->hasCustomRR();
    }
    static void setCustom(Class cls) {
        cls->setHasCustomRR();
    }
    static void setDefault(Class cls) {
        cls->setHasDefaultRR();
    }
    static bool isInterestingSelector(SEL sel) {
        return sel == @selector(retain) ||
               sel == @selector(release) ||
               sel == @selector(autorelease) ||
               sel == @selector(_tryRetain) ||
               sel == @selector(_isDeallocating) ||
               sel == @selector(retainCount) ||
               sel == @selector(allowsWeakReference) ||
               sel == @selector(retainWeakReference);
    }
    template <typename T>
    static bool scanMethodLists(T *mlists, T *end) {
        SEL sels[8] = {
            @selector(retain),
            @selector(release),
            @selector(autorelease),
            @selector(_tryRetain),
            @selector(_isDeallocating),
            @selector(retainCount),
            @selector(allowsWeakReference),
            @selector(retainWeakReference),
        };
        return method_lists_contains_any(mlists, end, sels, 8);
    }
};
// Core NSObject methods that are extremely rarely overridden
//
// +new, ±class, ±self, ±isKindOfClass:, ±respondsToSelector
struct CoreScanner : scanner::Mixin<CoreScanner, Core, PrintCustomCore> {
    static bool knownClassHasDefaultImpl(Class cls, bool isMeta) {
        if (scanner::Mixin<CoreScanner, Core, PrintCustomCore>::knownClassHasDefaultImpl(cls, isMeta))
            return true;
        if ((cls->isRootClass() || cls->isRootMetaclass())
            && strcmp(cls->mangledName(), "_TtCs12_SwiftObject") == 0)
            return true;

        return false;
    }

    static bool isCustom(Class cls) {
        return cls->hasCustomCore();
    }
    static void setCustom(Class cls) {
        cls->setHasCustomCore();
    }
    static void setDefault(Class cls) {
        cls->setHasDefaultCore();
    }
    static bool isInterestingSelector(SEL sel) {
        return sel == @selector(new) ||
               sel == @selector(self) ||
               sel == @selector(class) ||
               sel == @selector(isKindOfClass:) ||
               sel == @selector(respondsToSelector:);
    }
    template <typename T>
    static bool scanMethodLists(T *mlists, T *end) {
        SEL sels[5] = {
            @selector(new),
            @selector(self),
            @selector(class),
            @selector(isKindOfClass:),
            @selector(respondsToSelector:)
        };
        return method_lists_contains_any(mlists, end, sels, 5);
    }
};

} // namespace objc
class category_list : nocopy_t {
    union {
        locstamped_category_t lc;
        struct {
            locstamped_category_t *array;
            // this aliases with locstamped_category_t::hi
            // which is an aliased pointer
            uint32_t is_array :  1;
            uint32_t count    : 31;
            uint32_t size     : 32;
        };
    } _u;

public:
    category_list() : _u{{nullptr, nullptr}} { }
    category_list(locstamped_category_t lc) : _u{{lc}} { }
    category_list(category_list &&other) : category_list() {
        std::swap(_u, other._u);
    }

    uint32_t count() const
    {
        if (_u.is_array) return _u.count;
        return _u.lc.cat ? 1 : 0;
    }

    uint32_t arrayByteSize(uint32_t size) const
    {
        return sizeof(locstamped_category_t) * size;
    }

    const locstamped_category_t *array() const
    {
        return _u.is_array ? _u.array : &_u.lc;
    }
    void append(locstamped_category_t lc)
    {
        if (_u.is_array) {
            if (_u.count == _u.size) {
                // Have a typical malloc growth:
                // - size <= 8: grow by 2
                // - size <= 16: grow by 4
                // - size <= 32: grow by 8
                _u.size += _u.size < 8 ? 2 : 1 << (fls(_u.size) - 2);
                _u.array = (locstamped_category_t *)reallocf(_u.array, arrayByteSize(_u.size));
            }
            _u.array[_u.count++] = lc;
        } else if (_u.lc.cat == NULL) {
            _u.lc = lc;
        } else {
            locstamped_category_t *arr = (locstamped_category_t *)malloc(arrayByteSize(2));
            arr[0] = _u.lc;
            arr[1] = lc;
            _u.array = arr;
            _u.is_array = true;
            _u.count = 2;
            _u.size = 2;
        }
    }
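    // For illustration (not in the original): starting from the inline pair,
    // the growth formula above yields capacities 2, 4, 6, 8, 12, 16, 24, 32,
    // 48, 64, ... That is, grow by 2 up to 8 entries, then by
    // 1 << (fls(size) - 2), which is between a third and a half of the
    // current size.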
    void erase(category_t *cat)
    {
        if (_u.is_array) {
            for (int i = 0; i < _u.count; i++) {
                if (_u.array[i].cat == cat) {
                    // shift entries to preserve list order
                    memmove(&_u.array[i], &_u.array[i+1], arrayByteSize(_u.count - i - 1));
                    return;
                }
            }
        } else if (_u.lc.cat == cat) {
            _u.lc.cat = NULL;
            _u.lc.hi = NULL;
        }
    }
};
class UnattachedCategories : public ExplicitInitDenseMap<Class, category_list>
{
public:
    void addForClass(locstamped_category_t lc, Class cls)
    {
        runtimeLock.assertLocked();

        if (slowpath(PrintConnecting)) {
            _objc_inform("CLASS: found category %c%s(%s)",
                         cls->isMetaClassMaybeUnrealized() ? '+' : '-',
                         cls->nameForLogging(), lc.cat->name);
        }

        auto result = get().try_emplace(cls, lc);
        if (!result.second) {
            result.first->second.append(lc);
        }
    }

    void attachToClass(Class cls, Class previously, int flags)
    {
        runtimeLock.assertLocked();
        ASSERT((flags & ATTACH_CLASS) ||
               (flags & ATTACH_METACLASS) ||
               (flags & ATTACH_CLASS_AND_METACLASS));

        auto &map = get();
        auto it = map.find(previously);

        if (it != map.end()) {
            category_list &list = it->second;
            if (flags & ATTACH_CLASS_AND_METACLASS) {
                int otherFlags = flags & ~ATTACH_CLASS_AND_METACLASS;
                attachCategories(cls, list.array(), list.count(), otherFlags | ATTACH_CLASS);
                attachCategories(cls->ISA(), list.array(), list.count(), otherFlags | ATTACH_METACLASS);
            } else {
                attachCategories(cls, list.array(), list.count(), flags);
            }
            map.erase(it);
        }
    }

    void eraseCategoryForClass(category_t *cat, Class cls)
    {
        runtimeLock.assertLocked();

        auto &map = get();
        auto it = map.find(cls);
        if (it != map.end()) {
            category_list &list = it->second;
            list.erase(cat);
            if (list.count() == 0) {
                map.erase(it);
            }
        }
    }

    void eraseClass(Class cls)
    {
        runtimeLock.assertLocked();

        get().erase(cls);
    }
};

static UnattachedCategories unattachedCategories;
static bool isBundleClass(Class cls)
{
    return cls->data()->ro()->flags & RO_FROM_BUNDLE;
}
static void
fixupMethodList(method_list_t *mlist, bool bundleCopy, bool sort)
{
    runtimeLock.assertLocked();
    ASSERT(!mlist->isFixedUp());

    // fixme lock less in attachMethodLists ?
    // dyld3 may have already uniqued, but not sorted, the list
    if (!mlist->isUniqued()) {
        mutex_locker_t lock(selLock);

        // Unique selectors in list.
        for (auto& meth : *mlist) {
            const char *name = sel_cname(meth.name());
            meth.setName(sel_registerNameNoLock(name, bundleCopy));
        }
    }

    // Sort by selector address.
    // Don't try to sort small lists, as they're immutable.
    // Don't try to sort big lists of nonstandard size, as stable_sort
    // won't copy the entries properly.
    if (sort && !mlist->isSmallList() && mlist->entsize() == method_t::bigSize) {
        method_t::SortBySELAddress sorter;
        std::stable_sort(&mlist->begin()->big(), &mlist->end()->big(), sorter);
    }

    // Mark method list as uniqued and sorted.
    // Can't mark small lists, since they're immutable.
    if (!mlist->isSmallList()) {
        mlist->setFixedUp();
    }
}
static void
prepareMethodLists(Class cls, method_list_t **addedLists, int addedCount,
                   bool baseMethods, bool methodsFromBundle, const char *why)
{
    runtimeLock.assertLocked();

    if (addedCount == 0) return;

    // There exist RR/AWZ/Core special cases for some class's base methods.
    // But this code should never need to scan base methods for RR/AWZ/Core:
    // default RR/AWZ/Core cannot be set before setInitialized().
    // Therefore we need not handle any special cases here.
    if (baseMethods) {
        ASSERT(cls->hasCustomAWZ() && cls->hasCustomRR() && cls->hasCustomCore());
    } else if (cls->cache.isConstantOptimizedCache()) {
        cls->setDisallowPreoptCachesRecursively(why);
    } else if (cls->allowsPreoptInlinedSels()) {
#if CONFIG_USE_PREOPT_CACHES
        SEL *sels = (SEL *)objc_opt_offsets[OBJC_OPT_INLINED_METHODS_START];
        SEL *sels_end = (SEL *)objc_opt_offsets[OBJC_OPT_INLINED_METHODS_END];
        if (method_lists_contains_any(addedLists, addedLists + addedCount, sels, sels_end - sels)) {
            cls->setDisallowPreoptInlinedSelsRecursively(why);
        }
#endif
    }

    // Add method lists to array.
    // Reallocate un-fixed method lists.
    // The new methods are PREPENDED to the method list array.
    for (int i = 0; i < addedCount; i++) {
        method_list_t *mlist = addedLists[i];
        ASSERT(mlist);

        // Fixup selectors if necessary
        if (!mlist->isFixedUp()) {
            fixupMethodList(mlist, methodsFromBundle, true/*sort*/);
        }
    }

    // If the class is initialized, then scan for method implementations
    // tracked by the class's flags. If it's not initialized yet,
    // then objc_class::setInitialized() will take care of it.
    if (cls->isInitialized()) {
        objc::AWZScanner::scanAddedMethodLists(cls, addedLists, addedCount);
        objc::RRScanner::scanAddedMethodLists(cls, addedLists, addedCount);
        objc::CoreScanner::scanAddedMethodLists(cls, addedLists, addedCount);
    }
}
class_rw_ext_t *
class_rw_t::extAlloc(const class_ro_t *ro, bool deepCopy)
{
    runtimeLock.assertLocked();

    auto rwe = objc::zalloc<class_rw_ext_t>();

    rwe->version = (ro->flags & RO_META) ? 7 : 0;

    method_list_t *list = ro->baseMethods();
    if (list) {
        if (deepCopy) list = list->duplicate();
        rwe->methods.attachLists(&list, 1);
    }

    // See comments in objc_duplicateClass
    // property lists and protocol lists historically
    // have not been deep-copied
    //
    // This is probably wrong and ought to be fixed some day
    property_list_t *proplist = ro->baseProperties;
    if (proplist) {
        rwe->properties.attachLists(&proplist, 1);
    }

    protocol_list_t *protolist = ro->baseProtocols;
    if (protolist) {
        rwe->protocols.attachLists(&protolist, 1);
    }

    set_ro_or_rwe(rwe, ro);
    return rwe;
}
// Attach method lists and properties and protocols from categories to a class.
// Assumes the categories in cats are all loaded and sorted by load order,
// oldest categories first.
static void
attachCategories(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count,
                 int flags)
{
    if (slowpath(PrintReplacedMethods)) {
        printReplacements(cls, cats_list, cats_count);
    }
    if (slowpath(PrintConnecting)) {
        _objc_inform("CLASS: attaching %d categories to%s class '%s'%s",
                     cats_count, (flags & ATTACH_EXISTING) ? " existing" : "",
                     cls->nameForLogging(), (flags & ATTACH_METACLASS) ? " (meta)" : "");
    }

    /*
     * Only a few classes have more than 64 categories during launch.
     * This uses a little stack, and avoids malloc.
     *
     * Categories must be added in the proper order, which is back
     * to front. To do that with the chunking, we iterate cats_list
     * from front to back, build up the local buffers backwards,
     * and call attachLists on the chunks. attachLists prepends the
     * lists, so the final result is in the expected order.
     */
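    /*
     * Illustrative example (not in the original comment): with a buffer of
     * size 2 and categories A, B, C loaded oldest-first, the first chunk is
     * attached as [B, A] and the remainder as [C]; because attachLists
     * prepends, the final list order is C, B, A, base methods, so the newest
     * category is found first during method lookup.
     */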
    constexpr uint32_t ATTACH_BUFSIZ = 64;
    method_list_t   *mlists[ATTACH_BUFSIZ];
    property_list_t *proplists[ATTACH_BUFSIZ];
    protocol_list_t *protolists[ATTACH_BUFSIZ];

    uint32_t mcount = 0;
    uint32_t propcount = 0;
    uint32_t protocount = 0;
    bool fromBundle = NO;
    bool isMeta = (flags & ATTACH_METACLASS);
    auto rwe = cls->data()->extAllocIfNeeded();

    for (uint32_t i = 0; i < cats_count; i++) {
        auto& entry = cats_list[i];

        method_list_t *mlist = entry.cat->methodsForMeta(isMeta);
        if (mlist) {
            if (mcount == ATTACH_BUFSIZ) {
                prepareMethodLists(cls, mlists, mcount, NO, fromBundle, __func__);
                rwe->methods.attachLists(mlists, mcount);
                mcount = 0;
            }
            mlists[ATTACH_BUFSIZ - ++mcount] = mlist;
            fromBundle |= entry.hi->isBundle();
        }

        property_list_t *proplist =
            entry.cat->propertiesForMeta(isMeta, entry.hi);
        if (proplist) {
            if (propcount == ATTACH_BUFSIZ) {
                rwe->properties.attachLists(proplists, propcount);
                propcount = 0;
            }
            proplists[ATTACH_BUFSIZ - ++propcount] = proplist;
        }

        protocol_list_t *protolist = entry.cat->protocolsForMeta(isMeta);
        if (protolist) {
            if (protocount == ATTACH_BUFSIZ) {
                rwe->protocols.attachLists(protolists, protocount);
                protocount = 0;
            }
            protolists[ATTACH_BUFSIZ - ++protocount] = protolist;
        }
    }

    if (mcount > 0) {
        prepareMethodLists(cls, mlists + ATTACH_BUFSIZ - mcount, mcount,
                           NO, fromBundle, __func__);
        rwe->methods.attachLists(mlists + ATTACH_BUFSIZ - mcount, mcount);
        if (flags & ATTACH_EXISTING) {
            flushCaches(cls, __func__, [](Class c){
                // constant caches have been dealt with in prepareMethodLists;
                // if the class is still constant here, it's fine to keep
                return !c->cache.isConstantOptimizedCache();
            });
        }
    }

    rwe->properties.attachLists(proplists + ATTACH_BUFSIZ - propcount, propcount);

    rwe->protocols.attachLists(protolists + ATTACH_BUFSIZ - protocount, protocount);
}
/***********************************************************************
* methodizeClass
* Fixes up cls's method list, protocol list, and property list.
* Attaches any outstanding categories.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void methodizeClass(Class cls, Class previously)
{
    runtimeLock.assertLocked();

    bool isMeta = cls->isMetaClass();
    auto rw = cls->data();
    auto ro = rw->ro();
    auto rwe = rw->ext();

    // Methodizing for the first time
    if (PrintConnecting) {
        _objc_inform("CLASS: methodizing class '%s' %s",
                     cls->nameForLogging(), isMeta ? "(meta)" : "");
    }

    // Install methods and properties that the class implements itself.
    method_list_t *list = ro->baseMethods();
    if (list) {
        prepareMethodLists(cls, &list, 1, YES, isBundleClass(cls), nullptr);
        if (rwe) rwe->methods.attachLists(&list, 1);
    }

    property_list_t *proplist = ro->baseProperties;
    if (rwe && proplist) {
        rwe->properties.attachLists(&proplist, 1);
    }

    protocol_list_t *protolist = ro->baseProtocols;
    if (rwe && protolist) {
        rwe->protocols.attachLists(&protolist, 1);
    }

    // Root classes get bonus method implementations if they don't have
    // them already. These apply before category replacements.
    if (cls->isRootMetaclass()) {
        addMethod(cls, @selector(initialize), (IMP)&objc_noop_imp, "", NO);
    }

    // Attach categories.
    if (previously) {
        if (isMeta) {
            objc::unattachedCategories.attachToClass(cls, previously,
                                                     ATTACH_METACLASS);
        } else {
            // When a class relocates, categories with class methods
            // may be registered on the class itself rather than on
            // the metaclass. Tell attachToClass to look for those.
            objc::unattachedCategories.attachToClass(cls, previously,
                                                     ATTACH_CLASS_AND_METACLASS);
        }
    }
    objc::unattachedCategories.attachToClass(cls, cls,
                                             isMeta ? ATTACH_METACLASS : ATTACH_CLASS);

#if DEBUG
    // Debug: sanity-check all SELs; log method list contents
    for (const auto& meth : rw->methods()) {
        if (PrintConnecting) {
            _objc_inform("METHOD %c[%s %s]", isMeta ? '+' : '-',
                         cls->nameForLogging(), sel_getName(meth.name()));
        }
        ASSERT(sel_registerName(sel_getName(meth.name())) == meth.name());
    }
#endif
}
/***********************************************************************
* nonMetaClasses
* Returns the secondary metaclass => class map
* Used for some cases of +initialize and +resolveClassMethod:.
* This map does not contain all class and metaclass pairs. It only
* contains metaclasses whose classes would be in the runtime-allocated
* named-class table, but are not because some other class with the same name
* is in that table.
* Classes with no duplicates are not included.
* Classes in the preoptimized named-class table are not included.
* Classes whose duplicates are in the preoptimized table are not included.
* Most code should use getMaybeUnrealizedNonMetaClass()
* instead of reading this table.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static NXMapTable *nonmeta_class_map = nil;
static NXMapTable *nonMetaClasses(void)
{
    runtimeLock.assertLocked();

    if (nonmeta_class_map) return nonmeta_class_map;

    // nonmeta_class_map is typically small
    INIT_ONCE_PTR(nonmeta_class_map,
                  NXCreateMapTable(NXPtrValueMapPrototype, 32),
                  NXFreeMapTable(v));

    return nonmeta_class_map;
}
/***********************************************************************
* addNonMetaClass
* Adds metacls => cls to the secondary metaclass map
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void addNonMetaClass(Class cls)
{
    runtimeLock.assertLocked();
    void *old;
    old = NXMapInsert(nonMetaClasses(), cls->ISA(), cls);

    ASSERT(!cls->isMetaClassMaybeUnrealized());
    ASSERT(cls->ISA()->isMetaClassMaybeUnrealized());
    ASSERT(!old);
}


static void removeNonMetaClass(Class cls)
{
    runtimeLock.assertLocked();
    NXMapRemove(nonMetaClasses(), cls->ISA());
}
static bool scanMangledField(const char *&string, const char *end,
                             const char *&field, int& length)
{
    // Leading zero not allowed.
    if (*string == '0') return false;

    length = 0;
    field = string;
    while (field < end) {
        char c = *field;
        if (!isdigit(c)) break;
        field++;
        if (__builtin_smul_overflow(length, 10, &length)) return false;
        if (__builtin_sadd_overflow(length, c - '0', &length)) return false;
    }

    string = field + length;
    return length > 0 && string <= end;
}
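// For illustration: given string == "5Hello5World_", scanMangledField sets
// field to point at "Hello", sets length to 5, and advances string past
// "Hello" so that the next call scans "5World...".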
/***********************************************************************
* copySwiftV1DemangledName
* Returns the pretty form of the given Swift-v1-mangled class or protocol name.
* Returns nil if the string doesn't look like a mangled Swift v1 name.
* The result must be freed with free().
**********************************************************************/
static char *copySwiftV1DemangledName(const char *string, bool isProtocol = false)
{
    if (!string) return nil;

    // Swift mangling prefix.
    if (strncmp(string, isProtocol ? "_TtP" : "_TtC", 4) != 0) return nil;
    string += 4;

    const char *end = string + strlen(string);

    // Module name.
    const char *prefix;
    int prefixLength;
    if (string[0] == 's') {
        // "s" is the Swift module.
        prefix = "Swift";
        prefixLength = 5;
        string += 1;
    } else {
        if (! scanMangledField(string, end, prefix, prefixLength)) return nil;
    }

    // Class or protocol name.
    const char *suffix;
    int suffixLength;
    if (! scanMangledField(string, end, suffix, suffixLength)) return nil;

    if (isProtocol) {
        // Remainder must be "_".
        if (strcmp(string, "_") != 0) return nil;
    } else {
        // Remainder must be empty.
        if (string != end) return nil;
    }

    char *result;
    asprintf(&result, "%.*s.%.*s", prefixLength,prefix, suffixLength,suffix);
    return result;
}
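// For illustration: "_TtC5Hello5World" demangles to "Hello.World", and
// "_TtCs6Object" (where "s" stands for the Swift module) demangles to
// "Swift.Object".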
/***********************************************************************
* copySwiftV1MangledName
* Returns the Swift 1.0 mangled form of the given class or protocol name.
* Returns nil if the string doesn't look like an unmangled Swift name.
* The result must be freed with free().
**********************************************************************/
static char *copySwiftV1MangledName(const char *string, bool isProtocol = false)
{
    if (!string) return nil;

    size_t dotCount = 0;
    size_t dotIndex = 0;
    const char *s;
    for (s = string; *s; s++) {
        if (*s == '.') {
            dotCount++;
            dotIndex = s - string;
        }
    }
    size_t stringLength = s - string;

    if (dotCount != 1  ||  dotIndex == 0  ||  dotIndex >= stringLength-1) {
        return nil;
    }

    const char *prefix = string;
    size_t prefixLength = dotIndex;
    const char *suffix = string + dotIndex + 1;
    size_t suffixLength = stringLength - (dotIndex + 1);

    char *name;

    if (prefixLength == 5  &&  memcmp(prefix, "Swift", 5) == 0) {
        asprintf(&name, "_Tt%cs%zu%.*s%s",
                 isProtocol ? 'P' : 'C',
                 suffixLength, (int)suffixLength, suffix,
                 isProtocol ? "_" : "");
    } else {
        asprintf(&name, "_Tt%c%zu%.*s%zu%.*s%s",
                 isProtocol ? 'P' : 'C',
                 prefixLength, (int)prefixLength, prefix,
                 suffixLength, (int)suffixLength, suffix,
                 isProtocol ? "_" : "");
    }
    return name;
}
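// For illustration: "Hello.World" mangles to "_TtC5Hello5World", while a
// "Swift." prefix takes the short module form, e.g. "Swift.Object" becomes
// "_TtCs6Object". Protocol names get 'P' and a trailing underscore, e.g.
// "Hello.Proto" becomes "_TtP5Hello5Proto_".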
/***********************************************************************
* getClassExceptSomeSwift
* Looks up a class by name. The class MIGHT NOT be realized.
* Demangled Swift names are recognized.
* Classes known to the Swift runtime but not yet used are NOT recognized.
* (such as subclasses of un-instantiated generics)
* Use look_up_class() to find them as well.
* Locking: runtimeLock must be read- or write-locked by the caller.
**********************************************************************/

// This is a misnomer: gdb_objc_realized_classes is actually a list of
// named classes not in the dyld shared cache, whether realized or not.
// This list excludes lazily named classes, which have to be looked up
// using a getClass hook.
NXMapTable *gdb_objc_realized_classes;  // exported for debuggers in objc-gdb.h
uintptr_t objc_debug_realized_class_generation_count;

static Class getClass_impl(const char *name)
{
    runtimeLock.assertLocked();

    // allocated in _read_images
    ASSERT(gdb_objc_realized_classes);

    // Try runtime-allocated table
    Class result = (Class)NXMapGet(gdb_objc_realized_classes, name);
    if (result) return result;

    // Try table from dyld shared cache.
    // Note we do this last to handle the case where we dlopen'ed a shared cache
    // dylib with duplicates of classes already present in the main executable.
    // In that case, we put the class from the main executable in
    // gdb_objc_realized_classes and want to check that before considering any
    // newly loaded shared cache binaries.
    return getPreoptimizedClass(name);
}
static Class getClassExceptSomeSwift(const char *name)
{
    runtimeLock.assertLocked();

    // Try name as-is
    Class result = getClass_impl(name);
    if (result) return result;

    // Try Swift-mangled equivalent of the given name.
    if (char *swName = copySwiftV1MangledName(name)) {
        result = getClass_impl(swName);
        free(swName);
        return result;
    }

    return nil;
}
/***********************************************************************
* addNamedClass
* Adds name => cls to the named non-meta class map.
* Warns about duplicate class names and keeps the old mapping.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void addNamedClass(Class cls, const char *name, Class replacing = nil)
{
    runtimeLock.assertLocked();
    Class old;
    if ((old = getClassExceptSomeSwift(name)) && old != replacing) {
        inform_duplicate(name, old, cls);

        // getMaybeUnrealizedNonMetaClass uses name lookups.
        // Classes not found by name lookup must be in the
        // secondary meta->nonmeta table.
        addNonMetaClass(cls);
    } else {
        NXMapInsert(gdb_objc_realized_classes, name, cls);
    }
    ASSERT(!(cls->data()->flags & RO_META));

    // wrong: constructed classes are already realized when they get here
    // ASSERT(!cls->isRealized());
}
/***********************************************************************
* removeNamedClass
* Removes cls from the name => cls map.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void removeNamedClass(Class cls, const char *name)
{
    runtimeLock.assertLocked();
    ASSERT(!(cls->data()->flags & RO_META));
    if (cls == NXMapGet(gdb_objc_realized_classes, name)) {
        NXMapRemove(gdb_objc_realized_classes, name);
    } else {
        // cls has a name collision with another class - don't remove the other
        // but do remove cls from the secondary metaclass->class map.
        removeNonMetaClass(cls);
    }
}
/***********************************************************************
* futureNamedClasses
* Returns the classname => future class map for unrealized future classes.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static NXMapTable *future_named_class_map = nil;
static NXMapTable *futureNamedClasses()
{
    runtimeLock.assertLocked();

    if (future_named_class_map) return future_named_class_map;

    // future_named_class_map is big enough for CF's classes and a few others
    future_named_class_map =
        NXCreateMapTable(NXStrValueMapPrototype, 32);

    return future_named_class_map;
}


static bool haveFutureNamedClasses() {
    return future_named_class_map  &&  NXCountMapTable(future_named_class_map);
}
/***********************************************************************
* addFutureNamedClass
* Installs cls as the class structure to use for the named class if it appears.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static void addFutureNamedClass(const char *name, Class cls)
{
    void *old;

    runtimeLock.assertLocked();

    if (PrintFuture) {
        _objc_inform("FUTURE: reserving %p for %s", (void*)cls, name);
    }

    class_rw_t *rw = objc::zalloc<class_rw_t>();
    class_ro_t *ro = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
    ro->name.store(strdupIfMutable(name), std::memory_order_relaxed);
    rw->set_ro(ro);
    cls->setData(rw);
    cls->data()->flags = RO_FUTURE;

    old = NXMapKeyCopyingInsert(futureNamedClasses(), name, cls);
    ASSERT(!old);
}
/***********************************************************************
* popFutureNamedClass
* Removes the named class from the unrealized future class list,
* because it has been realized.
* Returns nil if the name is not used by a future class.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static Class popFutureNamedClass(const char *name)
{
    runtimeLock.assertLocked();

    Class cls = nil;

    if (future_named_class_map) {
        cls = (Class)NXMapKeyFreeingRemove(future_named_class_map, name);
        if (cls && NXCountMapTable(future_named_class_map) == 0) {
            NXFreeMapTable(future_named_class_map);
            future_named_class_map = nil;
        }
    }

    return cls;
}
/***********************************************************************
* remappedClasses
* Returns the oldClass => newClass map for realized future classes.
* Returns the oldClass => nil map for ignored weak-linked classes.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static objc::DenseMap<Class, Class> *remappedClasses(bool create)
{
    static objc::LazyInitDenseMap<Class, Class> remapped_class_map;

    runtimeLock.assertLocked();

    // start big enough to hold CF's classes and a few others
    return remapped_class_map.get(create, 32);
}
/***********************************************************************
* noClassesRemapped
* Returns YES if no classes have been remapped
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
static bool noClassesRemapped(void)
{
    runtimeLock.assertLocked();

    bool result = (remappedClasses(NO) == nil);
#if DEBUG
    // Catch construction of an empty table, which defeats optimization.
    auto *map = remappedClasses(NO);
    if (map) ASSERT(map->size() > 0);
#endif
    return result;
}
/***********************************************************************
* addRemappedClass
* newcls is a realized future class, replacing oldcls.
* OR newcls is nil, replacing ignored weak-linked class oldcls.
* Locking: runtimeLock must be write-locked by the caller
**********************************************************************/
static void addRemappedClass(Class oldcls, Class newcls)
{
    runtimeLock.assertLocked();

    if (PrintFuture) {
        _objc_inform("FUTURE: using %p instead of %p for %s",
                     (void*)newcls, (void*)oldcls, oldcls->nameForLogging());
    }

    auto result = remappedClasses(YES)->insert({ oldcls, newcls });
    if (!std::get<1>(result)) {
        // An existing mapping was overwritten. This is not allowed
        // unless it was to nil.
        auto iterator = std::get<0>(result);
        auto value = std::get<1>(*iterator);
        ASSERT(value == nil);
    }
}
1958 /***********************************************************************
1960 * Returns the live class pointer for cls, which may be pointing to
1961 * a class struct that has been reallocated.
1962 * Returns nil if cls is ignored because of weak linking.
1963 * Locking: runtimeLock must be read- or write-locked by the caller
1964 **********************************************************************/
1965 static Class remapClass(Class cls)
1967 runtimeLock.assertLocked();
1969 if (!cls) return nil;
1971 auto *map = remappedClasses(NO);
1975 auto iterator = map->find(cls);
1976 if (iterator == map->end())
1978 return std::get<1>(*iterator);
1981 static Class remapClass(classref_t cls)
1983 return remapClass((Class)cls);
1986 Class _class_remap(Class cls)
1988 mutex_locker_t lock(runtimeLock);
1989 return remapClass(cls);
1992 /***********************************************************************
1994 * Fix up a class ref, in case the class referenced has been reallocated
1995 * or is an ignored weak-linked class.
1996 * Locking: runtimeLock must be read- or write-locked by the caller
1997 **********************************************************************/
1998 static void remapClassRef(Class *clsref)
2000 runtimeLock.assertLocked();
2002 Class newcls = remapClass(*clsref);
2003 if (*clsref != newcls) *clsref = newcls;
2008 objc_loadClassref(_Nullable Class * _Nonnull clsref)
2010 auto *atomicClsref = explicit_atomic<uintptr_t>::from_pointer((uintptr_t *)clsref);
2012 uintptr_t cls = atomicClsref->load(std::memory_order_relaxed);
2013 if (fastpath((cls & 1) == 0))
2016 auto stub = (stub_class_t *)(cls & ~1ULL);
2017 Class initialized = stub->initializer((Class)stub, nil);
2018 atomicClsref->store((uintptr_t)initialized, std::memory_order_relaxed);
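// Illustrative sketch, not part of the original source: the low-bit
// tagging that objc_loadClassref() decodes above. A class ref slot either
// holds a plain Class pointer (bit 0 clear) or a pointer to a Swift class
// stub with bit 0 set; the stub's initializer is run once and the slot is
// overwritten with the resulting Class. A minimal model of the decode,
// assuming the same encoding:
//
//     uintptr_t raw = *(uintptr_t *)clsref;
//     if ((raw & 1) == 0) {
//         Class cls = (Class)raw;                            // ordinary class reference
//     } else {
//         stub_class_t *stub = (stub_class_t *)(raw & ~(uintptr_t)1);
//         Class cls = stub->initializer((Class)stub, nil);   // realize the stub once
//     }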
2023 /***********************************************************************
2024 * getMaybeUnrealizedNonMetaClass
2025 * Return the ordinary class for this class or metaclass.
2026 * `inst` is an instance of `cls` or a subclass thereof, or nil.
2027 * Non-nil inst is faster.
2028 * The result may be unrealized.
2029 * Used by +initialize.
2030 * Locking: runtimeLock must be read- or write-locked by the caller
2031 **********************************************************************/
2032 static Class getMaybeUnrealizedNonMetaClass(Class metacls, id inst)
2034 static int total, named, secondary, sharedcache, dyld3;
2035 runtimeLock.assertLocked();
2036 ASSERT(metacls->isRealized());
2040 // return cls itself if it's already a non-meta class
2041 if (!metacls->isMetaClass()) return metacls;
2043 // metacls really is a metaclass
2044 // which means inst (if any) is a class
2046 // special case for root metaclass
2047 // where inst == inst->ISA() == metacls is possible
2048 if (metacls->ISA() == metacls) {
2049 Class cls = metacls->getSuperclass();
2050 ASSERT(cls->isRealized());
2051 ASSERT(!cls->isMetaClass());
2052 ASSERT(cls->ISA() == metacls);
2053 if (cls->ISA() == metacls) return cls;
2056 // use inst if available
2058 Class cls = remapClass((Class)inst);
2059 // cls may be a subclass - find the real class for metacls
2060 // fixme this probably stops working once Swift starts
2061 // reallocating classes if cls is unrealized.
2063 if (cls->ISA() == metacls) {
2064 ASSERT(!cls->isMetaClassMaybeUnrealized());
2067 cls = cls->getSuperclass();
2070 _objc_fatal("cls is not an instance of metacls");
2072 // release build: be forgiving and fall through to slow lookups
2076 // See if the metaclass has a pointer to its nonmetaclass.
2077 if (Class cls = metacls->bits.safe_ro()->getNonMetaclass())
2082 Class cls = getClassExceptSomeSwift(metacls->mangledName());
2083 if (cls && cls->ISA() == metacls) {
2085 if (PrintInitializing) {
2086 _objc_inform("INITIALIZE: %d/%d (%g%%) "
2087 "successful by-name metaclass lookups",
2088 named, total, named*100.0/total);
2094 // try secondary table
2096 Class cls = (Class)NXMapGet(nonMetaClasses(), metacls);
2099 if (PrintInitializing) {
2100 _objc_inform("INITIALIZE: %d/%d (%g%%) "
2101 "successful secondary metaclass lookups",
2102 secondary, total, secondary*100.0/total);
2105 ASSERT(cls->ISA() == metacls);
2110 // try the dyld closure table
2111 if (isPreoptimized())
2113 // Try table from dyld closure first. It was built to ignore the dupes it
2114 // knows will come from the cache, so anything left in here was there when
2117 // Note, we have to pass the lambda directly here as otherwise we would try
2118 // message copy and autorelease.
2119 _dyld_for_each_objc_class(metacls->mangledName(),
2120 [&cls, metacls](void* classPtr, bool isLoaded, bool* stop) {
2121 // Skip images which aren't loaded. This supports the case where dyld
2122 // might soft link an image from the main binary so it's possibly not
2127 // Found a loaded image with this class name, so check if it's the right one
2128 Class result = (Class)classPtr;
2129 if (result->ISA() == metacls) {
2137 if (PrintInitializing) {
2138 _objc_inform("INITIALIZE: %d/%d (%g%%) "
2139 "successful dyld closure metaclass lookups",
2140 dyld3, total, dyld3*100.0/total);
2147 // try any duplicates in the dyld shared cache
2152 Class *classes = copyPreoptimizedClasses(metacls->mangledName(),&count);
2154 for (int i = 0; i < count; i++) {
2155 if (classes[i]->ISA() == metacls) {
2165 if (PrintInitializing) {
2166 _objc_inform("INITIALIZE: %d/%d (%g%%) "
2167 "successful shared cache metaclass lookups",
2168 sharedcache, total, sharedcache*100.0/total);
2175 _objc_fatal("no class for metaclass %p", (void*)metacls);
2179 /***********************************************************************
2180 * class_initialize. Send the '+initialize' message on demand to any
2181 * uninitialized class. Force initialization of superclasses first.
2182 * inst is an instance of cls, or nil. Non-nil is better for performance.
2183 * Returns the class pointer. If the class was unrealized then
2184 * it may be reallocated.
2186 * runtimeLock must be held by the caller
2187 * This function may drop the lock.
2188 * On exit the lock is re-acquired or dropped as requested by leaveLocked.
2189 **********************************************************************/
2190 static Class initializeAndMaybeRelock(Class cls, id inst,
2191 mutex_t& lock, bool leaveLocked)
2193 lock.assertLocked();
2194 ASSERT(cls->isRealized());
2196 if (cls->isInitialized()) {
2197 if (!leaveLocked) lock.unlock();
2201 // Find the non-meta class for cls, if it is not already one.
2202 // The +initialize message is sent to the non-meta class object.
2203 Class nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst);
2205 // Realize the non-meta class if necessary.
2206 if (nonmeta->isRealized()) {
2207 // nonmeta is cls, which was already realized
2208 // OR nonmeta is distinct, but is already realized
2209 // - nothing else to do
2212 nonmeta = realizeClassMaybeSwiftAndUnlock(nonmeta, lock);
2213 // runtimeLock is now unlocked
2214 // fixme Swift can't relocate the class today,
2215 // but someday it will:
2216 cls = object_getClass(nonmeta);
2219 // runtimeLock is now unlocked, for +initialize dispatch
2220 ASSERT(nonmeta->isRealized());
2221 initializeNonMetaClass(nonmeta);
2223 if (leaveLocked) runtimeLock.lock();
2227 // Locking: acquires runtimeLock
2228 Class class_initialize(Class cls, id obj)
2231 return initializeAndMaybeRelock(cls, obj, runtimeLock, false);
2234 // Locking: caller must hold runtimeLock; this may drop and re-acquire it
2235 static Class initializeAndLeaveLocked(Class cls, id obj, mutex_t& lock)
2237 return initializeAndMaybeRelock(cls, obj, lock, true);
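// Illustrative sketch, not part of the original source: the observable
// +initialize behavior these helpers implement, using hypothetical
// classes. Superclasses are initialized first, and the message is always
// delivered to the non-metaclass object.
//
//     @interface Base : NSObject @end
//     @implementation Base
//     + (void)initialize { NSLog(@"Base +initialize"); }
//     @end
//
//     @interface Child : Base @end
//     @implementation Child
//     + (void)initialize { NSLog(@"Child +initialize"); }
//     @end
//
//     [Child class];   // first message: logs Base's +initialize, then Child's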
2241 /***********************************************************************
2243 * Adds cls as a new realized root class.
2244 * Locking: runtimeLock must be held by the caller.
2245 **********************************************************************/
2246 static void addRootClass(Class cls)
2248 runtimeLock.assertLocked();
2250 ASSERT(cls->isRealized());
2252 objc_debug_realized_class_generation_count++;
2254 cls->data()->nextSiblingClass = _firstRealizedClass;
2255 _firstRealizedClass = cls;
2258 static void removeRootClass(Class cls)
2260 runtimeLock.assertLocked();
2262 objc_debug_realized_class_generation_count++;
2265 for (classp = &_firstRealizedClass;
2267 classp = &(*classp)->data()->nextSiblingClass)
2270 *classp = (*classp)->data()->nextSiblingClass;
2274 /***********************************************************************
2276 * Adds subcls as a subclass of supercls.
2277 * Locking: runtimeLock must be held by the caller.
2278 **********************************************************************/
2279 static void addSubclass(Class supercls, Class subcls)
2281 runtimeLock.assertLocked();
2283 if (supercls && subcls) {
2284 ASSERT(supercls->isRealized());
2285 ASSERT(subcls->isRealized());
2287 objc_debug_realized_class_generation_count++;
2289 subcls->data()->nextSiblingClass = supercls->data()->firstSubclass;
2290 supercls->data()->firstSubclass = subcls;
2292 if (supercls->hasCxxCtor()) {
2293 subcls->setHasCxxCtor();
2296 if (supercls->hasCxxDtor()) {
2297 subcls->setHasCxxDtor();
2300 objc::AWZScanner::scanAddedSubClass(subcls, supercls);
2301 objc::RRScanner::scanAddedSubClass(subcls, supercls);
2302 objc::CoreScanner::scanAddedSubClass(subcls, supercls);
2304 if (!supercls->allowsPreoptCaches()) {
2305 subcls->setDisallowPreoptCachesRecursively(__func__);
2306 } else if (!supercls->allowsPreoptInlinedSels()) {
2307 subcls->setDisallowPreoptInlinedSelsRecursively(__func__);
2310 // Special case: instancesRequireRawIsa does not propagate
2311 // from root class to root metaclass
2312 if (supercls->instancesRequireRawIsa() && supercls->getSuperclass()) {
2313 subcls->setInstancesRequireRawIsaRecursively(true);
2319 /***********************************************************************
2321 * Removes subcls as a subclass of supercls.
2322 * Locking: runtimeLock must be held by the caller.
2323 **********************************************************************/
2324 static void removeSubclass(Class supercls, Class subcls)
2326 runtimeLock.assertLocked();
2327 ASSERT(supercls->isRealized());
2328 ASSERT(subcls->isRealized());
2329 ASSERT(subcls->getSuperclass() == supercls);
2331 objc_debug_realized_class_generation_count++;
2334 for (cp = &supercls->data()->firstSubclass;
2335 *cp && *cp != subcls;
2336 cp = &(*cp)->data()->nextSiblingClass)
2338 ASSERT(*cp == subcls);
2339 *cp = subcls->data()->nextSiblingClass;
2344 /***********************************************************************
2346 * Returns the protocol name => protocol map for protocols.
2347 * Locking: runtimeLock must be read- or write-locked by the caller
2348 **********************************************************************/
2349 static NXMapTable *protocols(void)
2351 static NXMapTable *protocol_map = nil;
2353 runtimeLock.assertLocked();
2355 INIT_ONCE_PTR(protocol_map,
2356 NXCreateMapTable(NXStrValueMapPrototype, 16),
2357 NXFreeMapTable(v) );
2359 return protocol_map;
2363 /***********************************************************************
2365 * Looks up a protocol by name. Demangled Swift names are recognized.
2366 * Locking: runtimeLock must be read- or write-locked by the caller.
2367 **********************************************************************/
2368 static NEVER_INLINE Protocol *getProtocol(const char *name)
2370 runtimeLock.assertLocked();
2373 Protocol *result = (Protocol *)NXMapGet(protocols(), name);
2374 if (result) return result;
2376 // Try table from dyld3 closure and dyld shared cache
2377 result = getPreoptimizedProtocol(name);
2378 if (result) return result;
2380 // Try Swift-mangled equivalent of the given name.
2381 if (char *swName = copySwiftV1MangledName(name, true/*isProtocol*/)) {
2382 result = (Protocol *)NXMapGet(protocols(), swName);
2384 // Try table from dyld3 closure and dyld shared cache
2386 result = getPreoptimizedProtocol(swName);
2396 /***********************************************************************
2398 * Returns the live protocol pointer for proto, which may be pointing to
2399 * a protocol struct that has been reallocated.
2400 * Locking: runtimeLock must be read- or write-locked by the caller
2401 **********************************************************************/
2402 static ALWAYS_INLINE protocol_t *remapProtocol(protocol_ref_t proto)
2404 runtimeLock.assertLocked();
2406 // Protocols in shared cache images have a canonical bit to mark that they
2407 // are the definition we should use
2408 if (((protocol_t *)proto)->isCanonical())
2409 return (protocol_t *)proto;
2411 protocol_t *newproto = (protocol_t *)
2412 getProtocol(((protocol_t *)proto)->mangledName);
2413 return newproto ? newproto : (protocol_t *)proto;
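// Illustrative sketch, not part of the original source: the effect of
// protocol remapping as seen through public API. Duplicate protocol_t
// definitions from different images all resolve to one canonical object,
// so a compiler-emitted reference and a by-name lookup agree.
//
//     Protocol *byRef  = @protocol(NSCopying);
//     Protocol *byName = objc_getProtocol("NSCopying");
//     // protocol_isEqual(byRef, byName) is YES; after remapping both
//     // refer to the canonical definition.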
2417 /***********************************************************************
2419 * Fix up a protocol ref, in case the protocol referenced has been reallocated.
2420 * Locking: runtimeLock must be read- or write-locked by the caller
2421 **********************************************************************/
2422 static size_t UnfixedProtocolReferences;
2423 static void remapProtocolRef(protocol_t **protoref)
2425 runtimeLock.assertLocked();
2427 protocol_t *newproto = remapProtocol((protocol_ref_t)*protoref);
2428 if (*protoref != newproto) {
2429 *protoref = newproto;
2430 UnfixedProtocolReferences++;
2435 /***********************************************************************
2437 * Slides a class's ivars to accommodate the given superclass size.
2438 * Ivars are NOT compacted to compensate for a superclass that shrunk.
2439 * Locking: runtimeLock must be held by the caller.
2440 **********************************************************************/
2441 static void moveIvars(class_ro_t *ro, uint32_t superSize)
2443 runtimeLock.assertLocked();
2447 ASSERT(superSize > ro->instanceStart);
2448 diff = superSize - ro->instanceStart;
2451 // Find maximum alignment in this class's ivars
2452 uint32_t maxAlignment = 1;
2453 for (const auto& ivar : *ro->ivars) {
2454 if (!ivar.offset) continue; // anonymous bitfield
2456 uint32_t alignment = ivar.alignment();
2457 if (alignment > maxAlignment) maxAlignment = alignment;
2460 // Compute a slide value that preserves that alignment
2461 uint32_t alignMask = maxAlignment - 1;
2462 diff = (diff + alignMask) & ~alignMask;
2464 // Slide all of this class's ivars en masse
2465 for (const auto& ivar : *ro->ivars) {
2466 if (!ivar.offset) continue; // anonymous bitfield
2468 uint32_t oldOffset = (uint32_t)*ivar.offset;
2469 uint32_t newOffset = oldOffset + diff;
2470 *ivar.offset = newOffset;
2473 _objc_inform("IVARS: offset %u -> %u for %s "
2474 "(size %u, align %u)",
2475 oldOffset, newOffset, ivar.name,
2476 ivar.size, ivar.alignment());
2481 *(uint32_t *)&ro->instanceStart += diff;
2482 *(uint32_t *)&ro->instanceSize += diff;
2486 static void reconcileInstanceVariables(Class cls, Class supercls, const class_ro_t*& ro)
2488 class_rw_t *rw = cls->data();
2491 ASSERT(!cls->isMetaClass());
2493 /* debug: print them all before sliding
2495 for (const auto& ivar : *ro->ivars) {
2496 if (!ivar.offset) continue; // anonymous bitfield
2498 _objc_inform("IVARS: %s.%s (offset %u, size %u, align %u)",
2499 ro->name, ivar.name,
2500 *ivar.offset, ivar.size, ivar.alignment());
2505 // Non-fragile ivars - reconcile this class with its superclass
2506 const class_ro_t *super_ro = supercls->data()->ro();
2508 if (DebugNonFragileIvars) {
2509 // Debugging: Force non-fragile ivars to slide.
2510 // Intended to find compiler, runtime, and program bugs.
2511 // If it fails with this and works without, you have a problem.
2513 // Operation: Reset everything to 0 + misalignment.
2514 // Then force the normal sliding logic to push everything back.
2516 // Exceptions: root classes, metaclasses, *NSCF* classes,
2517 // __CF* classes, NSConstantString, NSSimpleCString
2519 // (already know it's not root because supercls != nil)
2520 const char *clsname = cls->mangledName();
2521 if (!strstr(clsname, "NSCF") &&
2522 0 != strncmp(clsname, "__CF", 4) &&
2523 0 != strcmp(clsname, "NSConstantString") &&
2524 0 != strcmp(clsname, "NSSimpleCString"))
2526 uint32_t oldStart = ro->instanceStart;
2527 class_ro_t *ro_w = make_ro_writeable(rw);
2530 // Find max ivar alignment in class.
2531 // default to word size to simplify ivar update
2532 uint32_t alignment = 1<<WORD_SHIFT;
2534 for (const auto& ivar : *ro->ivars) {
2535 if (ivar.alignment() > alignment) {
2536 alignment = ivar.alignment();
2540 uint32_t misalignment = ro->instanceStart % alignment;
2541 uint32_t delta = ro->instanceStart - misalignment;
2542 ro_w->instanceStart = misalignment;
2543 ro_w->instanceSize -= delta;
2546 _objc_inform("IVARS: DEBUG: forcing ivars for class '%s' "
2547 "to slide (instanceStart %zu -> %zu)",
2548 cls->nameForLogging(), (size_t)oldStart,
2549 (size_t)ro->instanceStart);
2553 for (const auto& ivar : *ro->ivars) {
2554 if (!ivar.offset) continue; // anonymous bitfield
2555 *ivar.offset -= delta;
2561 if (ro->instanceStart >= super_ro->instanceSize) {
2562 // Superclass has not overgrown its space. We're done here.
2565 // fixme can optimize for "class has no new ivars", etc
2567 if (ro->instanceStart < super_ro->instanceSize) {
2568 // Superclass has changed size. This class's ivars must move.
2569 // Also slide layout bits in parallel.
2570 // This code is incapable of compacting the subclass to
2571 // compensate for a superclass that shrunk, so don't do that.
2573 _objc_inform("IVARS: sliding ivars for class %s "
2574 "(superclass was %u bytes, now %u)",
2575 cls->nameForLogging(), ro->instanceStart,
2576 super_ro->instanceSize);
2578 class_ro_t *ro_w = make_ro_writeable(rw);
2580 moveIvars(ro_w, super_ro->instanceSize);
2581 gdb_objc_class_changed(cls, OBJC_CLASS_IVARS_CHANGED, ro->getName());
2585 static void validateAlreadyRealizedClass(Class cls) {
2586 ASSERT(cls->isRealized());
2588 class_rw_t *rw = cls->data();
2589 size_t rwSize = malloc_size(rw);
2591 // Note: this check will need some adjustment if class_rw_t's
2592 // size changes to not match the malloc bucket.
2593 if (rwSize != sizeof(class_rw_t))
2594 _objc_fatal("realized class %p has corrupt data pointer %p", cls, rw);
2598 /***********************************************************************
2599 * realizeClassWithoutSwift
2600 * Performs first-time initialization on class cls,
2601 * including allocating its read-write data.
2602 * Does not perform any Swift-side initialization.
2603 * Returns the real class structure for the class.
2604 * Locking: runtimeLock must be write-locked by the caller
2605 **********************************************************************/
2606 static Class realizeClassWithoutSwift(Class cls, Class previously)
2608 runtimeLock.assertLocked();
2614 if (!cls) return nil;
2615 if (cls->isRealized()) {
2616 validateAlreadyRealizedClass(cls);
2619 ASSERT(cls == remapClass(cls));
2621 // fixme verify class is not in an un-dlopened part of the shared cache?
2623 auto ro = (const class_ro_t *)cls->data();
2624 auto isMeta = ro->flags & RO_META;
2625 if (ro->flags & RO_FUTURE) {
2626 // This was a future class. rw data is already allocated.
2628 ro = cls->data()->ro();
2630 cls->changeInfo(RW_REALIZED|RW_REALIZING, RW_FUTURE);
2632 // Normal class. Allocate writeable class data.
2633 rw = objc::zalloc<class_rw_t>();
2635 rw->flags = RW_REALIZED|RW_REALIZING|isMeta;
2639 cls->cache.initializeToEmptyOrPreoptimizedInDisguise();
2642 if (isMeta) cls->cache.setBit(FAST_CACHE_META);
2645 // Choose an index for this class.
2646 // Sets cls->instancesRequireRawIsa if no more indexes are available.
2647 cls->chooseClassArrayIndex();
2649 if (PrintConnecting) {
2650 _objc_inform("CLASS: realizing class '%s'%s %p %p #%u %s%s",
2651 cls->nameForLogging(), isMeta ? " (meta)" : "",
2652 (void*)cls, ro, cls->classArrayIndex(),
2653 cls->isSwiftStable() ? "(swift)" : "",
2654 cls->isSwiftLegacy() ? "(pre-stable swift)" : "");
2657 // Realize superclass and metaclass, if they aren't already.
2658 // This needs to be done after RW_REALIZED is set above, for root classes.
2659 // This needs to be done after class index is chosen, for root metaclasses.
2660 // This assumes that none of those classes have Swift contents,
2661 // or that Swift's initializers have already been called.
2662 // fixme that assumption will be wrong if we add support
2663 // for ObjC subclasses of Swift classes.
2664 supercls = realizeClassWithoutSwift(remapClass(cls->getSuperclass()), nil);
2665 metacls = realizeClassWithoutSwift(remapClass(cls->ISA()), nil);
2667 #if SUPPORT_NONPOINTER_ISA
2669 // Metaclasses do not need any features from non-pointer isa.
2670 // This allows for a fastpath for classes in objc_retain/objc_release.
2671 cls->setInstancesRequireRawIsa();
2673 // Disable non-pointer isa for some classes and/or platforms.
2674 // Set instancesRequireRawIsa.
2675 bool instancesRequireRawIsa = cls->instancesRequireRawIsa();
2676 bool rawIsaIsInherited = false;
2677 static bool hackedDispatch = false;
2679 if (DisableNonpointerIsa) {
2680 // Non-pointer isa disabled by environment or app SDK version
2681 instancesRequireRawIsa = true;
2683 else if (!hackedDispatch && 0 == strcmp(ro->getName(), "OS_object"))
2685 // hack for libdispatch et al - isa also acts as vtable pointer
2686 hackedDispatch = true;
2687 instancesRequireRawIsa = true;
2689 else if (supercls && supercls->getSuperclass() &&
2690 supercls->instancesRequireRawIsa())
2692 // This is also propagated by addSubclass()
2693 // but nonpointer isa setup needs it earlier.
2694 // Special case: instancesRequireRawIsa does not propagate
2695 // from root class to root metaclass
2696 instancesRequireRawIsa = true;
2697 rawIsaIsInherited = true;
2700 if (instancesRequireRawIsa) {
2701 cls->setInstancesRequireRawIsaRecursively(rawIsaIsInherited);
2704 // SUPPORT_NONPOINTER_ISA
2707 // Update superclass and metaclass in case of remapping
2708 cls->setSuperclass(supercls);
2709 cls->initClassIsa(metacls);
2711 // Reconcile instance variable offsets / layout.
2712 // This may reallocate class_ro_t, updating our ro variable.
2713 if (supercls && !isMeta) reconcileInstanceVariables(cls, supercls, ro);
2715 // Set fastInstanceSize if it wasn't set already.
2716 cls->setInstanceSize(ro->instanceSize);
2718 // Copy some flags from ro to rw
2719 if (ro->flags & RO_HAS_CXX_STRUCTORS) {
2720 cls->setHasCxxDtor();
2721 if (! (ro->flags & RO_HAS_CXX_DTOR_ONLY)) {
2722 cls->setHasCxxCtor();
2726 // Propagate the associated objects forbidden flag from ro or from
2728 if ((ro->flags & RO_FORBIDS_ASSOCIATED_OBJECTS) ||
2729 (supercls && supercls->forbidsAssociatedObjects()))
2731 rw->flags |= RW_FORBIDS_ASSOCIATED_OBJECTS;
2734 // Connect this class to its superclass's subclass lists
2736 addSubclass(supercls, cls);
2741 // Attach categories
2742 methodizeClass(cls, previously);
2748 /***********************************************************************
2749 * _objc_realizeClassFromSwift
2750 * Called by Swift when it needs the ObjC part of a class to be realized.
2751 * There are four cases:
2752 * 1. cls != nil; previously == cls
2753 * Class cls is being realized in place
2754 * 2. cls != nil; previously == nil
2755 * Class cls is being constructed at runtime
2756 * 3. cls != nil; previously != cls
2757 * The class that was at previously has been reallocated to cls
2758 * 4. cls == nil, previously != nil
2759 * The class at previously is hereby disavowed
2761 * Only variants #1 and #2 are supported today.
2763 * Locking: acquires runtimeLock
2764 **********************************************************************/
2765 Class _objc_realizeClassFromSwift(Class cls, void *previously)
2768 if (previously && previously != (void*)cls) {
2770 mutex_locker_t lock(runtimeLock);
2771 addRemappedClass((Class)previously, cls);
2772 addClassTableEntry(cls);
2773 addNamedClass(cls, cls->mangledName(), /*replacing*/nil);
2774 return realizeClassWithoutSwift(cls, (Class)previously);
2776 // #1 and #2: realization in place, or new class
2777 mutex_locker_t lock(runtimeLock);
2781 cls = readClass(cls, false/*bundle*/, false/*shared cache*/);
2784 // #1 and #2: realization in place, or new class
2785 // We ignore the Swift metadata initializer callback.
2786 // We assume that's all handled since we're being called from Swift.
2787 return realizeClassWithoutSwift(cls, nil);
2792 // In the future this will mean remapping the old address to nil
2793 // and if necessary removing the old address from any other tables.
2794 _objc_fatal("Swift requested that class %p be ignored, "
2795 "but libobjc does not support that.", previously);
2799 /***********************************************************************
2801 * Performs first-time initialization on class cls,
2802 * including allocating its read-write data,
2803 * and any Swift-side initialization.
2804 * Returns the real class structure for the class.
2805 * Locking: acquires runtimeLock indirectly
2806 **********************************************************************/
2807 static Class realizeSwiftClass(Class cls)
2809 runtimeLock.assertUnlocked();
2811 // Some assumptions:
2812 // * Metaclasses never have a Swift initializer.
2813 // * Root classes never have a Swift initializer.
2814 // (These two together avoid initialization order problems at the root.)
2815 // * Unrealized non-Swift classes have no Swift ancestry.
2816 // * Unrealized Swift classes with no initializer have no ancestry that
2817 // does have the initializer.
2818 // (These two together mean we don't need to scan superclasses here
2819 // and we don't need to worry about Swift superclasses inside
2820 // realizeClassWithoutSwift()).
2822 // fixme some of these assumptions will be wrong
2823 // if we add support for ObjC subclasses of Swift classes.
2827 ASSERT(remapClass(cls) == cls);
2828 ASSERT(cls->isSwiftStable_ButAllowLegacyForNow());
2829 ASSERT(!cls->isMetaClassMaybeUnrealized());
2830 ASSERT(cls->getSuperclass());
2831 runtimeLock.unlock();
2834 // Look for a Swift metadata initialization function
2835 // installed on the class. If it is present we call it.
2836 // That function in turn initializes the Swift metadata,
2837 // prepares the "compiler-generated" ObjC metadata if not
2838 // already present, and calls _objc_realizeSwiftClass() to finish
2839 // our own initialization.
2841 if (auto init = cls->swiftMetadataInitializer()) {
2842 if (PrintConnecting) {
2843 _objc_inform("CLASS: calling Swift metadata initializer "
2844 "for class '%s' (%p)", cls->nameForLogging(), cls);
2847 Class newcls = init(cls, nil);
2849 // fixme someday Swift will need to relocate classes at this point,
2850 // but we don't accept that yet.
2851 if (cls != newcls) {
2852 mutex_locker_t lock(runtimeLock);
2853 addRemappedClass(cls, newcls);
2859 // No Swift-side initialization callback.
2860 // Perform our own realization directly.
2861 mutex_locker_t lock(runtimeLock);
2862 return realizeClassWithoutSwift(cls, nil);
2867 /***********************************************************************
2868 * realizeClassMaybeSwift (MaybeRelock / AndUnlock / AndLeaveLocked)
2869 * Realize a class that might be a Swift class.
2870 * Returns the real class structure for the class.
2872 * runtimeLock must be held on entry
2873 * runtimeLock may be dropped during execution
2874 * ...AndUnlock function leaves runtimeLock unlocked on exit
2875 * ...AndLeaveLocked re-acquires runtimeLock if it was dropped
2876 * This complication avoids repeated lock transitions in some cases.
2877 **********************************************************************/
2879 realizeClassMaybeSwiftMaybeRelock(Class cls, mutex_t& lock, bool leaveLocked)
2881 lock.assertLocked();
2883 if (!cls->isSwiftStable_ButAllowLegacyForNow()) {
2884 // Non-Swift class. Realize it now with the lock still held.
2885 // fixme wrong in the future for objc subclasses of swift classes
2886 realizeClassWithoutSwift(cls, nil);
2887 if (!leaveLocked) lock.unlock();
2889 // Swift class. We need to drop locks and call the Swift
2890 // runtime to initialize it.
2892 cls = realizeSwiftClass(cls);
2893 ASSERT(cls->isRealized()); // callback must have provoked realization
2894 if (leaveLocked) lock.lock();
2901 realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock)
2903 return realizeClassMaybeSwiftMaybeRelock(cls, lock, false);
2907 realizeClassMaybeSwiftAndLeaveLocked(Class cls, mutex_t& lock)
2909 return realizeClassMaybeSwiftMaybeRelock(cls, lock, true);
2913 /***********************************************************************
2914 * missingWeakSuperclass
2915 * Return YES if some superclass of cls was weak-linked and is missing.
2916 **********************************************************************/
2918 missingWeakSuperclass(Class cls)
2920 ASSERT(!cls->isRealized());
2922 if (!cls->getSuperclass()) {
2923 // superclass nil. This is normal for root classes only.
2924 return (!(cls->data()->flags & RO_ROOT));
2926 // superclass not nil. Check if a higher superclass is missing.
2927 Class supercls = remapClass(cls->getSuperclass());
2928 ASSERT(cls != cls->getSuperclass());
2929 ASSERT(cls != supercls);
2930 if (!supercls) return YES;
2931 if (supercls->isRealized()) return NO;
2932 return missingWeakSuperclass(supercls);
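// Illustrative sketch, not part of the original source: how a missing
// weak superclass typically arises. The superclass declaration is marked
// weak-import and its framework is absent at runtime, so every subclass
// must be disavowed by readClass(). Hypothetical declarations:
//
//     __attribute__((weak_import))
//     @interface MaybeAbsentClass : NSObject
//     @end
//
//     @interface MySubclass : MaybeAbsentClass
//     @end
//
//     // At runtime, if the framework providing MaybeAbsentClass is not
//     // present, objc_getClass("MySubclass") returns nil.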
2937 /***********************************************************************
2938 * realizeAllClassesInImage
2939 * Non-lazily realizes all unrealized classes in the given image.
2940 * Locking: runtimeLock must be held by the caller.
2941 * Locking: this function may drop and re-acquire the lock.
2942 **********************************************************************/
2943 static void realizeAllClassesInImage(header_info *hi)
2945 runtimeLock.assertLocked();
2948 classref_t const *classlist;
2950 if (hi->areAllClassesRealized()) return;
2952 classlist = _getObjc2ClassList(hi, &count);
2954 for (i = 0; i < count; i++) {
2955 Class cls = remapClass(classlist[i]);
2957 realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
2961 hi->setAllClassesRealized(YES);
2965 /***********************************************************************
2967 * Non-lazily realizes all unrealized classes in all known images.
2968 * Locking: runtimeLock must be held by the caller.
2969 * Locking: this function may drop and re-acquire the lock.
2970 * Dropping the lock makes this function thread-unsafe with respect
2971 * to concurrent image unload, but the callers of this function
2972 * already ultimately do something that is also thread-unsafe with
2973 * respect to image unload (such as using the list of all classes).
2974 **********************************************************************/
2975 static void realizeAllClasses(void)
2977 runtimeLock.assertLocked();
2980 for (hi = FirstHeader; hi; hi = hi->getNext()) {
2981 realizeAllClassesInImage(hi); // may drop and re-acquire runtimeLock
2986 /***********************************************************************
2987 * _objc_allocateFutureClass
2988 * Allocate an unresolved future class for the given class name.
2989 * Returns any existing allocation if one was already made.
2990 * Assumes the named class doesn't exist yet.
2991 * Locking: acquires runtimeLock
2992 **********************************************************************/
2993 Class _objc_allocateFutureClass(const char *name)
2995 mutex_locker_t lock(runtimeLock);
2998 NXMapTable *map = futureNamedClasses();
3000 if ((cls = (Class)NXMapGet(map, name))) {
3001 // Already have a future class for this name.
3005 cls = _calloc_class(sizeof(objc_class));
3006 addFutureNamedClass(name, cls);
3012 /***********************************************************************
3013 * objc_getFutureClass. Return the id of the named class.
3014 * If the class does not exist, return an uninitialized class
3015 * structure that will be used for the class when and if it
3018 **********************************************************************/
3019 Class objc_getFutureClass(const char *name)
3023 // YES unconnected, NO class handler
3024 // (unconnected is OK because it will someday be the real class)
3025 cls = look_up_class(name, YES, NO);
3028 _objc_inform("FUTURE: found %p already in use for %s",
3035 // No class or future class with that name yet. Make one.
3036 // fixme not thread-safe with respect to
3037 // simultaneous library load or getFutureClass.
3038 return _objc_allocateFutureClass(name);
3042 BOOL _class_isFutureClass(Class cls)
3044 return cls && cls->isFuture();
3047 BOOL _class_isSwift(Class _Nullable cls)
3049 return cls && cls->isSwiftStable();
3052 /***********************************************************************
3053 * _objc_flush_caches
3054 * Flushes all caches.
3055 * (Historical behavior: flush caches for cls, its metaclass,
3056 * and subclasses thereof. Nil flushes all classes.)
3057 * Locking: acquires runtimeLock
3058 **********************************************************************/
3059 static void flushCaches(Class cls, const char *func, bool (^predicate)(Class))
3061 runtimeLock.assertLocked();
3062 #if CONFIG_USE_CACHE_LOCK
3063 mutex_locker_t lock(cacheUpdateLock);
3066 const auto handler = ^(Class c) {
3068 c->cache.eraseNolock(func);
3075 foreach_realized_class_and_subclass(cls, handler);
3077 foreach_realized_class_and_metaclass(handler);
3082 void _objc_flush_caches(Class cls)
3085 mutex_locker_t lock(runtimeLock);
3086 flushCaches(cls, __func__, [](Class c){
3087 return !c->cache.isConstantOptimizedCache();
3089 if (cls && !cls->isMetaClass() && !cls->isRootClass()) {
3090 flushCaches(cls->ISA(), __func__, [](Class c){
3091 return !c->cache.isConstantOptimizedCache();
3094 // cls is a root class or root metaclass. Its metaclass is itself
3095 // or a subclass so the metaclass caches were already flushed.
3100 // collectALot if cls==nil
3101 #if CONFIG_USE_CACHE_LOCK
3102 mutex_locker_t lock(cacheUpdateLock);
3104 mutex_locker_t lock(runtimeLock);
3106 cache_t::collectNolock(true);
3111 /***********************************************************************
3113 * Process the given images which are being mapped in by dyld.
3114 * Calls ABI-agnostic code after taking ABI-specific locks.
3116 * Locking: write-locks runtimeLock
3117 **********************************************************************/
3119 map_images(unsigned count, const char * const paths[],
3120 const struct mach_header * const mhdrs[])
3122 mutex_locker_t lock(runtimeLock);
3123 return map_images_nolock(count, paths, mhdrs);
3127 static void load_categories_nolock(header_info *hi) {
3128 bool hasClassProperties = hi->info()->hasCategoryClassProperties();
3131 auto processCatlist = [&](category_t * const *catlist) {
3132 for (unsigned i = 0; i < count; i++) {
3133 category_t *cat = catlist[i];
3134 Class cls = remapClass(cat->cls);
3135 locstamped_category_t lc{cat, hi};
3138 // Category's target class is missing (probably weak-linked).
3139 // Ignore the category.
3140 if (PrintConnecting) {
3141 _objc_inform("CLASS: IGNORING category \?\?\?(%s) %p with "
3142 "missing weak-linked target class",
3148 // Process this category.
3149 if (cls->isStubClass()) {
3150 // Stub classes are never realized. Stub classes
3151 // don't know their metaclass until they're
3152 // initialized, so we have to add categories with
3153 // class methods or properties to the stub itself.
3154 // methodizeClass() will find them and add them to
3155 // the metaclass as appropriate.
3156 if (cat->instanceMethods ||
3158 cat->instanceProperties ||
3159 cat->classMethods ||
3161 (hasClassProperties && cat->_classProperties))
3163 objc::unattachedCategories.addForClass(lc, cls);
3166 // First, register the category with its target class.
3167 // Then, rebuild the class's method lists (etc) if
3168 // the class is realized.
3169 if (cat->instanceMethods || cat->protocols
3170 || cat->instanceProperties)
3172 if (cls->isRealized()) {
3173 attachCategories(cls, &lc, 1, ATTACH_EXISTING);
3175 objc::unattachedCategories.addForClass(lc, cls);
3179 if (cat->classMethods || cat->protocols
3180 || (hasClassProperties && cat->_classProperties))
3182 if (cls->ISA()->isRealized()) {
3183 attachCategories(cls->ISA(), &lc, 1, ATTACH_EXISTING | ATTACH_METACLASS);
3185 objc::unattachedCategories.addForClass(lc, cls->ISA());
3192 processCatlist(hi->catlist(&count));
3193 processCatlist(hi->catlist2(&count));
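// Illustrative sketch, not part of the original source, of how a category
// is split by the code above. Given:
//
//     @interface NSObject (Example)
//     - (void)example_instanceMethod;   // instance-method half
//     + (void)example_classMethod;      // class-method half
//     @end
//
// the instance-method half (methods, protocols, instance properties) is
// attached to the class and the class-method half to its metaclass,
// immediately via ATTACH_EXISTING when the class is already realized, or
// recorded in unattachedCategories for methodizeClass() to attach later.
// Stub classes are the exception: everything is recorded against the stub
// until it is initialized.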
3196 static void loadAllCategories() {
3197 mutex_locker_t lock(runtimeLock);
3199 for (auto *hi = FirstHeader; hi != NULL; hi = hi->getNext()) {
3200 load_categories_nolock(hi);
3204 /***********************************************************************
3206 * Process +load in the given images which are being mapped in by dyld.
3208 * Locking: write-locks runtimeLock and loadMethodLock
3209 **********************************************************************/
3210 extern bool hasLoadMethods(const headerType *mhdr);
3211 extern void prepare_load_methods(const headerType *mhdr);
3214 load_images(const char *path __unused, const struct mach_header *mh)
3216 if (!didInitialAttachCategories && didCallDyldNotifyRegister) {
3217 didInitialAttachCategories = true;
3218 loadAllCategories();
3221 // Return without taking locks if there are no +load methods here.
3222 if (!hasLoadMethods((const headerType *)mh)) return;
3224 recursive_mutex_locker_t lock(loadMethodLock);
3226 // Discover load methods
3228 mutex_locker_t lock2(runtimeLock);
3229 prepare_load_methods((const headerType *)mh);
3232 // Call +load methods (without runtimeLock - re-entrant)
3233 call_load_methods();
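// Illustrative sketch, not part of the original source: the +load ordering
// the calls above produce, using hypothetical classes. Superclasses run
// before subclasses, and class +load runs before category +load; +load is
// invoked directly rather than through ordinary message dispatch.
//
//     @implementation Base                 // Base : NSObject
//     + (void)load { NSLog(@"Base +load"); }
//     @end
//
//     @implementation Child                // Child : Base
//     + (void)load { NSLog(@"Child +load"); }
//     @end
//
//     @implementation Child (Extras)
//     + (void)load { NSLog(@"Child(Extras) +load"); }
//     @end
//
//     // Logged order: Base +load, Child +load, Child(Extras) +load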
3237 /***********************************************************************
3239 * Process the given image which is about to be unmapped by dyld.
3241 * Locking: write-locks runtimeLock and loadMethodLock
3242 **********************************************************************/
3244 unmap_image(const char *path __unused, const struct mach_header *mh)
3246 recursive_mutex_locker_t lock(loadMethodLock);
3247 mutex_locker_t lock2(runtimeLock);
3248 unmap_image_nolock(mh);
3252 /***********************************************************************
3254 * Preflight check in advance of readClass() from an image.
3255 **********************************************************************/
3256 bool mustReadClasses(header_info *hi, bool hasDyldRoots)
3260 // If the image is not preoptimized then we must read classes.
3261 if (!hi->hasPreoptimizedClasses()) {
3262 reason = nil; // Don't log this one because it is noisy.
3266 // If iOS simulator then we must read classes.
3267 #if TARGET_OS_SIMULATOR
3268 reason = "the image is for iOS simulator";
3272 ASSERT(!hi->isBundle()); // no MH_BUNDLE in shared cache
3274 // If the image may have missing weak superclasses then we must read classes
3275 if (!noMissingWeakSuperclasses() || hasDyldRoots) {
3276 reason = "the image may contain classes with missing weak superclasses";
3280 // If there are unresolved future classes then we must read classes.
3281 if (haveFutureNamedClasses()) {
3282 reason = "there are unresolved future classes pending";
3286 // readClass() rewrites bits in backward-deploying Swift stable ABI code.
3287 // The assumption here is that there are no such classes
3288 // in the dyld shared cache.
3292 classref_t const *classlist = _getObjc2ClassList(hi, &count);
3293 for (size_t i = 0; i < count; i++) {
3294 Class cls = remapClass(classlist[i]);
3295 ASSERT(!cls->isUnfixedBackwardDeployingStableSwift());
3300 // readClass() does not need to do anything.
3304 if (PrintPreopt && reason) {
3305 _objc_inform("PREOPTIMIZATION: reading classes manually from %s "
3306 "because %s", hi->fname(), reason);
3312 /***********************************************************************
3314 * Read a class and metaclass as written by a compiler.
3315 * Returns the new class pointer. This could be:
3317 * - nil (cls has a missing weak-linked superclass)
3318 * - something else (space for this class was reserved by a future class)
3320 * Note that all work performed by this function is preflighted by
3321 * mustReadClasses(). Do not change this function without updating that one.
3323 * Locking: runtimeLock acquired by map_images or objc_readClassPair
3324 **********************************************************************/
3325 Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized)
3327 const char *mangledName = cls->nonlazyMangledName();
3329 if (missingWeakSuperclass(cls)) {
3330 // No superclass (probably weak-linked).
3331 // Disavow any knowledge of this subclass.
3332 if (PrintConnecting) {
3333 _objc_inform("CLASS: IGNORING class '%s' with "
3334 "missing weak-linked superclass",
3335 cls->nameForLogging());
3337 addRemappedClass(cls, nil);
3338 cls->setSuperclass(nil);
3342 cls->fixupBackwardDeployingStableSwift();
3344 Class replacing = nil;
3345 if (mangledName != nullptr) {
3346 if (Class newCls = popFutureNamedClass(mangledName)) {
3347 // This name was previously allocated as a future class.
3348 // Copy objc_class to future class's struct.
3349 // Preserve future's rw data block.
3351 if (newCls->isAnySwift()) {
3352 _objc_fatal("Can't complete future class request for '%s' "
3353 "because the real class is too big.",
3354 cls->nameForLogging());
3357 class_rw_t *rw = newCls->data();
3358 const class_ro_t *old_ro = rw->ro();
3359 memcpy(newCls, cls, sizeof(objc_class));
3361 // Manually set address-discriminated ptrauthed fields
3362 // so that newCls gets the correct signatures.
3363 newCls->setSuperclass(cls->getSuperclass());
3364 newCls->initIsa(cls->getIsa());
3366 rw->set_ro((class_ro_t *)newCls->data());
3367 newCls->setData(rw);
3368 freeIfMutable((char *)old_ro->getName());
3369 free((void *)old_ro);
3371 addRemappedClass(cls, newCls);
3378 if (headerIsPreoptimized && !replacing) {
3379 // class list built in shared cache
3380 // fixme strict assert doesn't work because of duplicates
3381 // ASSERT(cls == getClass(name));
3382 ASSERT(mangledName == nullptr || getClassExceptSomeSwift(mangledName));
3384 if (mangledName) { // some Swift generic classes can lazily generate their names
3385 addNamedClass(cls, mangledName, replacing);
3387 Class meta = cls->ISA();
3388 const class_ro_t *metaRO = meta->bits.safe_ro();
3389 ASSERT(metaRO->getNonMetaclass() && "Metaclass with lazy name must have a pointer to the corresponding nonmetaclass.");
3390 ASSERT(metaRO->getNonMetaclass() == cls && "Metaclass nonmetaclass pointer must equal the original class.");
3392 addClassTableEntry(cls);
3395 // for future reference: shared cache never contains MH_BUNDLEs
3396 if (headerIsBundle) {
3397 cls->data()->flags |= RO_FROM_BUNDLE;
3398 cls->ISA()->data()->flags |= RO_FROM_BUNDLE;
3405 /***********************************************************************
3407 * Read a protocol as written by a compiler.
3408 **********************************************************************/
3410 readProtocol(protocol_t *newproto, Class protocol_class,
3411 NXMapTable *protocol_map,
3412 bool headerIsPreoptimized, bool headerIsBundle)
3414 // This is not enough to make protocols in unloaded bundles safe,
3415 // but it does prevent crashes when looking up unrelated protocols.
3416 auto insertFn = headerIsBundle ? NXMapKeyCopyingInsert : NXMapInsert;
3418 protocol_t *oldproto = (protocol_t *)getProtocol(newproto->mangledName);
3421 if (oldproto != newproto) {
3422 // Some other definition already won.
3423 if (PrintProtocols) {
3424 _objc_inform("PROTOCOLS: protocol at %p is %s "
3425 "(duplicate of %p)",
3426 newproto, oldproto->nameForLogging(), oldproto);
3429 // If we are a shared cache binary then we have a definition of this
3430 // protocol, but if another one was chosen then we need to clear our
3431 // isCanonical bit so that no-one trusts it.
3432 // Note, if getProtocol returned a shared cache protocol then the
3433 // canonical definition is already in the shared cache and we don't
3434 // need to do anything.
3435 if (headerIsPreoptimized && !oldproto->isCanonical()) {
3436 // Note newproto is an entry in our __objc_protolist section which
3437 // for shared cache binaries points to the original protocol in
3438 // that binary, not the shared cache uniqued one.
3439 auto cacheproto = (protocol_t *)
3440 getSharedCachePreoptimizedProtocol(newproto->mangledName);
3441 if (cacheproto && cacheproto->isCanonical())
3442 cacheproto->clearIsCanonical();
3446 else if (headerIsPreoptimized) {
3447 // Shared cache initialized the protocol object itself,
3448 // but in order to allow out-of-cache replacement we need
3449 // to add it to the protocol table now.
3451 protocol_t *cacheproto = (protocol_t *)
3452 getPreoptimizedProtocol(newproto->mangledName);
3453 protocol_t *installedproto;
3454 if (cacheproto && cacheproto != newproto) {
3455 // Another definition in the shared cache wins (because
3456 // everything in the cache was fixed up to point to it).
3457 installedproto = cacheproto;
3460 // This definition wins.
3461 installedproto = newproto;
3464 ASSERT(installedproto->getIsa() == protocol_class);
3465 ASSERT(installedproto->size >= sizeof(protocol_t));
3466 insertFn(protocol_map, installedproto->mangledName,
3469 if (PrintProtocols) {
3470 _objc_inform("PROTOCOLS: protocol at %p is %s",
3471 installedproto, installedproto->nameForLogging());
3472 if (newproto != installedproto) {
3473 _objc_inform("PROTOCOLS: protocol at %p is %s "
3474 "(duplicate of %p)",
3475 newproto, installedproto->nameForLogging(),
3481 // New protocol from an un-preoptimized image. Fix it up in place.
3482 // fixme duplicate protocols from unloadable bundle
3483 newproto->initIsa(protocol_class); // fixme pinned
3484 insertFn(protocol_map, newproto->mangledName, newproto);
3485 if (PrintProtocols) {
3486 _objc_inform("PROTOCOLS: protocol at %p is %s",
3487 newproto, newproto->nameForLogging());
3492 /***********************************************************************
3494 * Perform initial processing of the headers in the linked
3495 * list beginning with headerList.
3497 * Called by: map_images_nolock
3499 * Locking: runtimeLock acquired by map_images
3500 **********************************************************************/
3501 void _read_images(header_info **hList, uint32_t hCount, int totalClasses, int unoptimizedTotalClasses)
3507 Class *resolvedFutureClasses = nil;
3508 size_t resolvedFutureClassCount = 0;
3509 static bool doneOnce;
3510 bool launchTime = NO;
3511 TimeLogger ts(PrintImageTimes);
3513 runtimeLock.assertLocked();
3515 #define EACH_HEADER \
3517 hIndex < hCount && (hi = hList[hIndex]); \
3524 #if SUPPORT_NONPOINTER_ISA
3525 // Disable non-pointer isa under some conditions.
3527 # if SUPPORT_INDEXED_ISA
3528 // Disable nonpointer isa if any image contains old Swift code
3530 if (hi->info()->containsSwift() &&
3531 hi->info()->swiftUnstableVersion() < objc_image_info::SwiftVersion3)
3533 DisableNonpointerIsa = true;
3535 _objc_inform("RAW ISA: disabling non-pointer isa because "
3536 "the app or a framework contains Swift code "
3537 "older than Swift 3.0");
3545 // Disable non-pointer isa if the app is too old
3546 // (linked before OS X 10.11)
3547 if (!dyld_program_sdk_at_least(dyld_platform_version_macOS_10_11)) {
3548 DisableNonpointerIsa = true;
3550 _objc_inform("RAW ISA: disabling non-pointer isa because "
3551 "the app is too old.");
3555 // Disable non-pointer isa if the app has a __DATA,__objc_rawisa section
3556 // New apps that load old extensions may need this.
3558 if (hi->mhdr()->filetype != MH_EXECUTE) continue;
3560 if (getsectiondata(hi->mhdr(), "__DATA", "__objc_rawisa", &size)) {
3561 DisableNonpointerIsa = true;
3563 _objc_inform("RAW ISA: disabling non-pointer isa because "
3564 "the app has a __DATA,__objc_rawisa section");
3567 break; // assume only one MH_EXECUTE image
3573 if (DisableTaggedPointers) {
3574 disableTaggedPointers();
3577 initializeTaggedPointerObfuscator();
3579 if (PrintConnecting) {
3580 _objc_inform("CLASS: found %d classes during launch", totalClasses);
3584 // Preoptimized classes don't go in this table.
3585 // 4/3 is NXMapTable's load factor
3586 int namedClassesSize =
3587 (isPreoptimized() ? unoptimizedTotalClasses : totalClasses) * 4 / 3;
3588 gdb_objc_realized_classes =
3589 NXCreateMapTable(NXStrValueMapPrototype, namedClassesSize);
3591 ts.log("IMAGE TIMES: first time tasks");
3594 // Fix up @selector references
3595 static size_t UnfixedSelectors;
3597 mutex_locker_t lock(selLock);
3599 if (hi->hasPreoptimizedSelectors()) continue;
3601 bool isBundle = hi->isBundle();
3602 SEL *sels = _getObjc2SelectorRefs(hi, &count);
3603 UnfixedSelectors += count;
3604 for (i = 0; i < count; i++) {
3605 const char *name = sel_cname(sels[i]);
3606 SEL sel = sel_registerNameNoLock(name, isBundle);
3607 if (sels[i] != sel) {
3614 ts.log("IMAGE TIMES: fix up selector references");
3616 // Discover classes. Fix up unresolved future classes. Mark bundle classes.
3617 bool hasDyldRoots = dyld_shared_cache_some_image_overridden();
3620 if (! mustReadClasses(hi, hasDyldRoots)) {
3621 // Image is sufficiently optimized that we need not call readClass()
3625 classref_t const *classlist = _getObjc2ClassList(hi, &count);
3627 bool headerIsBundle = hi->isBundle();
3628 bool headerIsPreoptimized = hi->hasPreoptimizedClasses();
3630 for (i = 0; i < count; i++) {
3631 Class cls = (Class)classlist[i];
3632 Class newCls = readClass(cls, headerIsBundle, headerIsPreoptimized);
3634 if (newCls != cls && newCls) {
3635 // Class was moved but not deleted. Currently this occurs
3636 // only when the new class resolved a future class.
3637 // Non-lazily realize the class below.
3638 resolvedFutureClasses = (Class *)
3639 realloc(resolvedFutureClasses,
3640 (resolvedFutureClassCount+1) * sizeof(Class));
3641 resolvedFutureClasses[resolvedFutureClassCount++] = newCls;
3646 ts.log("IMAGE TIMES: discover classes");
3648 // Fix up remapped classes
3649 // Class list and nonlazy class list remain unremapped.
3650 // Class refs and super refs are remapped for message dispatching.
3652 if (!noClassesRemapped()) {
3654 Class *classrefs = _getObjc2ClassRefs(hi, &count);
3655 for (i = 0; i < count; i++) {
3656 remapClassRef(&classrefs[i]);
3658 // fixme why doesn't test future1 catch the absence of this?
3659 classrefs = _getObjc2SuperRefs(hi, &count);
3660 for (i = 0; i < count; i++) {
3661 remapClassRef(&classrefs[i]);
3666 ts.log("IMAGE TIMES: remap classes");
3669 // Fix up old objc_msgSend_fixup call sites
3671 message_ref_t *refs = _getObjc2MessageRefs(hi, &count);
3672 if (count == 0) continue;
3675 _objc_inform("VTABLES: repairing %zu unsupported vtable dispatch "
3676 "call sites in %s", count, hi->fname());
3678 for (i = 0; i < count; i++) {
3679 fixupMessageRef(refs+i);
3683 ts.log("IMAGE TIMES: fix up objc_msgSend_fixup");
3687 // Discover protocols. Fix up protocol refs.
3689 extern objc_class OBJC_CLASS_$_Protocol;
3690 Class cls = (Class)&OBJC_CLASS_$_Protocol;
3692 NXMapTable *protocol_map = protocols();
3693 bool isPreoptimized = hi->hasPreoptimizedProtocols();
3695 // Skip reading protocols if this is an image from the shared cache
3696 // and we support roots
3697 // Note, after launch we do need to walk the protocol as the protocol
3698 // in the shared cache is marked with isCanonical() and that may not
3699 // be true if some non-shared cache binary was chosen as the canonical
3701 if (launchTime && isPreoptimized) {
3702 if (PrintProtocols) {
3703 _objc_inform("PROTOCOLS: Skipping reading protocols in image: %s",
3709 bool isBundle = hi->isBundle();
3711 protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count);
3712 for (i = 0; i < count; i++) {
3713 readProtocol(protolist[i], cls, protocol_map,
3714 isPreoptimized, isBundle);
3718 ts.log("IMAGE TIMES: discover protocols");
3720 // Fix up @protocol references
3721 // Preoptimized images may have the right
3722 // answer already but we don't know for sure.
3724 // At launch time, we know preoptimized image refs are pointing at the
3725 // shared cache definition of a protocol. We can skip the check on
3726 // launch, but have to visit @protocol refs for shared cache images
3728 if (launchTime && hi->isPreoptimized())
3730 protocol_t **protolist = _getObjc2ProtocolRefs(hi, &count);
3731 for (i = 0; i < count; i++) {
3732 remapProtocolRef(&protolist[i]);
3736 ts.log("IMAGE TIMES: fix up @protocol references");
3738 // Discover categories. Only do this after the initial category
3739 // attachment has been done. For categories present at startup,
3740 // discovery is deferred until the first load_images call after
3741 // the call to _dyld_objc_notify_register completes. rdar://problem/53119145
3742 if (didInitialAttachCategories) {
3744 load_categories_nolock(hi);
3748 ts.log("IMAGE TIMES: discover categories");
3750 // Category discovery MUST BE Late to avoid potential races
3751 // when other threads call the new category code before
3752 // this thread finishes its fixups.
3754 // +load handled by prepare_load_methods()
3756 // Realize non-lazy classes (for +load methods and static instances)
3758 classref_t const *classlist = hi->nlclslist(&count);
3759 for (i = 0; i < count; i++) {
3760 Class cls = remapClass(classlist[i]);
3763 addClassTableEntry(cls);
3765 if (cls->isSwiftStable()) {
3766 if (cls->swiftMetadataInitializer()) {
3767 _objc_fatal("Swift class %s with a metadata initializer "
3768 "is not allowed to be non-lazy",
3769 cls->nameForLogging());
3771 // fixme also disallow relocatable classes
3772 // We can't disallow all Swift classes because of
3773 // classes like Swift.__EmptyArrayStorage
3775 realizeClassWithoutSwift(cls, nil);
3779 ts.log("IMAGE TIMES: realize non-lazy classes");
3781 // Realize newly-resolved future classes, in case CF manipulates them
3782 if (resolvedFutureClasses) {
3783 for (i = 0; i < resolvedFutureClassCount; i++) {
3784 Class cls = resolvedFutureClasses[i];
3785 if (cls->isSwiftStable()) {
3786 _objc_fatal("Swift class is not allowed to be future");
3788 realizeClassWithoutSwift(cls, nil);
3789 cls->setInstancesRequireRawIsaRecursively(false/*inherited*/);
3791 free(resolvedFutureClasses);
3794 ts.log("IMAGE TIMES: realize future classes");
3796 if (DebugNonFragileIvars) {
3797 realizeAllClasses();
3801 // Print preoptimization statistics
3803 static unsigned int PreoptTotalMethodLists;
3804 static unsigned int PreoptOptimizedMethodLists;
3805 static unsigned int PreoptTotalClasses;
3806 static unsigned int PreoptOptimizedClasses;
3809 if (hi->hasPreoptimizedSelectors()) {
3810 _objc_inform("PREOPTIMIZATION: honoring preoptimized selectors "
3811 "in %s", hi->fname());
3813 else if (hi->info()->optimizedByDyld()) {
3814 _objc_inform("PREOPTIMIZATION: IGNORING preoptimized selectors "
3815 "in %s", hi->fname());
3818 classref_t const *classlist = _getObjc2ClassList(hi, &count);
3819 for (i = 0; i < count; i++) {
3820 Class cls = remapClass(classlist[i]);
3823 PreoptTotalClasses++;
3824 if (hi->hasPreoptimizedClasses()) {
3825 PreoptOptimizedClasses++;
3828 const method_list_t *mlist;
3829 if ((mlist = cls->bits.safe_ro()->baseMethods())) {
3830 PreoptTotalMethodLists++;
3831 if (mlist->isFixedUp()) {
3832 PreoptOptimizedMethodLists++;
3835 if ((mlist = cls->ISA()->bits.safe_ro()->baseMethods())) {
3836 PreoptTotalMethodLists++;
3837 if (mlist->isFixedUp()) {
3838 PreoptOptimizedMethodLists++;
3844 _objc_inform("PREOPTIMIZATION: %zu selector references not "
3845 "pre-optimized", UnfixedSelectors);
3846 _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) method lists pre-sorted",
3847 PreoptOptimizedMethodLists, PreoptTotalMethodLists,
3848 PreoptTotalMethodLists
3849 ? 100.0*PreoptOptimizedMethodLists/PreoptTotalMethodLists
3851 _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) classes pre-registered",
3852 PreoptOptimizedClasses, PreoptTotalClasses,
3854 ? 100.0*PreoptOptimizedClasses/PreoptTotalClasses
3856 _objc_inform("PREOPTIMIZATION: %zu protocol references not "
3857 "pre-optimized", UnfixedProtocolReferences);
3864 /***********************************************************************
3865 * prepare_load_methods
3866 * Schedule +load for classes in this image, any un-+load-ed
3867 * superclasses in other images, and any categories in this image.
3868 **********************************************************************/
3869 // Recursively schedule +load for cls and any un-+load-ed superclasses.
3870 // cls must already be connected.
3871 static void schedule_class_load(Class cls)
3874 ASSERT(cls->isRealized()); // _read_images should realize
3876 if (cls->data()->flags & RW_LOADED) return;
3878 // Ensure superclass-first ordering
3879 schedule_class_load(cls->getSuperclass());
3881 add_class_to_loadable_list(cls);
3882 cls->setInfo(RW_LOADED);
3885 // Quick scan for +load methods that doesn't take a lock.
3886 bool hasLoadMethods(const headerType *mhdr)
3889 if (_getObjc2NonlazyClassList(mhdr, &count) && count > 0) return true;
3890 if (_getObjc2NonlazyCategoryList(mhdr, &count) && count > 0) return true;
3894 void prepare_load_methods(const headerType *mhdr)
3898 runtimeLock.assertLocked();
3900 classref_t const *classlist =
3901 _getObjc2NonlazyClassList(mhdr, &count);
3902 for (i = 0; i < count; i++) {
3903 schedule_class_load(remapClass(classlist[i]));
3906 category_t * const *categorylist = _getObjc2NonlazyCategoryList(mhdr, &count);
3907 for (i = 0; i < count; i++) {
3908 category_t *cat = categorylist[i];
3909 Class cls = remapClass(cat->cls);
3910 if (!cls) continue; // category for ignored weak-linked class
3911 if (cls->isSwiftStable()) {
3912 _objc_fatal("Swift class extensions and categories on Swift "
3913 "classes are not allowed to have +load methods");
3915 realizeClassWithoutSwift(cls, nil);
3916 ASSERT(cls->ISA()->isRealized());
3917 add_category_to_loadable_list(cat);
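// Illustration (not compiled into the runtime; the classes below are
// hypothetical): the scheduling above is what makes +load run
// superclass-first, and classes before categories, within an image.
// For these declarations the observed order is Base, Sub, then Sub(Extras).
#if 0
#import <Foundation/Foundation.h>

@interface Base : NSObject @end
@implementation Base
+ (void)load { NSLog(@"+[Base load]"); }        // superclass runs first
@end

@interface Sub : Base @end
@implementation Sub
+ (void)load { NSLog(@"+[Sub load]"); }         // runs after Base
@end

@interface Sub (Extras) @end
@implementation Sub (Extras)
+ (void)load { NSLog(@"+[Sub(Extras) load]"); } // categories run after classes
@end
#endif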
3922 /***********************************************************************
3923 * _unload_image
3924 * Only handles MH_BUNDLE for now.
3925 * Locking: write-lock and loadMethodLock acquired by unmap_image
3926 **********************************************************************/
3927 void _unload_image(header_info *hi)
3931 loadMethodLock.assertLocked();
3932 runtimeLock.assertLocked();
3934 // Unload unattached categories and categories waiting for +load.
3936 // Ignore __objc_catlist2. We don't support unloading Swift
3937 // and we never will.
3938 category_t * const *catlist = hi->catlist(&count);
3939 for (i = 0; i < count; i++) {
3940 category_t *cat = catlist[i];
3941 Class cls = remapClass(cat->cls);
3942 if (!cls) continue; // category for ignored weak-linked class
3944 // fixme for MH_DYLIB cat's class may have been unloaded already
3947 objc::unattachedCategories.eraseCategoryForClass(cat, cls);
3950 remove_category_from_loadable_list(cat);
3955 // Gather classes from both __DATA,__objc_clslist
3956 // and __DATA,__objc_nlclslist. arclite's hack puts a class in the latter
3957 // only, and we need to unload that class if we unload an arclite image.
3959 objc::DenseSet<Class> classes{};
3960 classref_t const *classlist;
3962 classlist = _getObjc2ClassList(hi, &count);
3963 for (i = 0; i < count; i++) {
3964 Class cls = remapClass(classlist[i]);
3965 if (cls) classes.insert(cls);
3968 classlist = hi->nlclslist(&count);
3969 for (i = 0; i < count; i++) {
3970 Class cls = remapClass(classlist[i]);
3971 if (cls) classes.insert(cls);
3974 // First detach classes from each other. Then free each class.
3975 // This avoids bugs where this loop unloads a subclass before its superclass.
3977 for (Class cls: classes) {
3978 remove_class_from_loadable_list(cls);
3979 detach_class(cls->ISA(), YES);
3980 detach_class(cls, NO);
3982 for (Class cls: classes) {
3983 free_class(cls->ISA());
3987 // XXX FIXME -- Clean up protocols:
3988 // <rdar://problem/9033191> Support unloading protocols at dylib/image unload time
3990 // fixme DebugUnload
3994 /***********************************************************************
3995 * method_getDescription
3996 * Returns a pointer to this method's objc_method_description.
3998 **********************************************************************/
3999 struct objc_method_description *
4000 method_getDescription(Method m)
4003 return m->getDescription();
4008 method_getImplementation(Method m)
4010 return m ? m->imp(true) : nil;
4013 IMPAndSEL _method_getImplementationAndName(Method m)
4015 return { m->imp(true), m->name() };
4019 /***********************************************************************
4020 * method_getName
4021 * Returns this method's selector.
4022 * The method must not be nil.
4023 * The method must already have been fixed-up.
4025 **********************************************************************/
4027 method_getName(Method m)
4031 ASSERT(m->name() == sel_registerName(sel_getName(m->name())));
4036 /***********************************************************************
4037 * method_getTypeEncoding
4038 * Returns this method's old-style type encoding string.
4039 * The method must not be nil.
4041 **********************************************************************/
4043 method_getTypeEncoding(Method m)
4050 /***********************************************************************
4051 * method_setImplementation
4052 * Sets this method's implementation to imp.
4053 * The previous implementation is returned.
4054 **********************************************************************/
4056 _method_setImplementation(Class cls, method_t *m, IMP imp)
4058 runtimeLock.assertLocked();
4061 if (!imp) return nil;
4063 IMP old = m->imp(false);
4064 SEL sel = m->name();
4068 // Cache updates are slow if cls is nil (i.e. unknown)
4069 // RR/AWZ updates are slow if cls is nil (i.e. unknown)
4070 // fixme build list of classes whose Methods are known externally?
4072 flushCaches(cls, __func__, [sel, old](Class c){
4073 return c->cache.shouldFlush(sel, old);
4076 adjustCustomFlagsForMethodChange(cls, m);
4082 method_setImplementation(Method m, IMP imp)
4084 // Don't know the class - will be slow if RR/AWZ are affected
4085 // fixme build list of classes whose Methods are known externally?
4086 mutex_locker_t lock(runtimeLock);
4087 return _method_setImplementation(Nil, m, imp);
4090 extern void _method_setImplementationRawUnsafe(Method m, IMP imp)
4092 mutex_locker_t lock(runtimeLock);
4097 void method_exchangeImplementations(Method m1, Method m2)
4099 if (!m1 || !m2) return;
4101 mutex_locker_t lock(runtimeLock);
4103 IMP imp1 = m1->imp(false);
4104 IMP imp2 = m2->imp(false);
4105 SEL sel1 = m1->name();
4106 SEL sel2 = m2->name();
4112 // RR/AWZ updates are slow because class is unknown
4113 // Cache updates are slow because class is unknown
4114 // fixme build list of classes whose Methods are known externally?
4116 flushCaches(nil, __func__, [sel1, sel2, imp1, imp2](Class c){
4117 return c->cache.shouldFlush(sel1, imp1) || c->cache.shouldFlush(sel2, imp2);
4120 adjustCustomFlagsForMethodChange(nil, m1);
4121 adjustCustomFlagsForMethodChange(nil, m2);
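// Usage sketch (not part of the runtime build): exchanging two instance
// method implementations through the public API. The class "MyView" and
// its selectors are hypothetical; real code should also check that both
// methods exist and have compatible type encodings before swapping.
#if 0
#include <objc/runtime.h>
#include <string.h>

static void swizzleExample(void)
{
    Class cls = objc_getClass("MyView");   // hypothetical class
    Method orig = class_getInstanceMethod(cls, sel_registerName("layout"));
    Method repl = class_getInstanceMethod(cls, sel_registerName("my_layout"));
    if (orig && repl &&
        0 == strcmp(method_getTypeEncoding(orig), method_getTypeEncoding(repl)))
    {
        method_exchangeImplementations(orig, repl);
    }
}
#endif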
4125 /***********************************************************************
4126 * ivar_getOffset
4129 **********************************************************************/
4131 ivar_getOffset(Ivar ivar)
4133 if (!ivar) return 0;
4134 return *ivar->offset;
4138 /***********************************************************************
4139 * ivar_getName
4142 **********************************************************************/
4144 ivar_getName(Ivar ivar)
4146 if (!ivar) return nil;
4151 /***********************************************************************
4152 * ivar_getTypeEncoding
4155 **********************************************************************/
4157 ivar_getTypeEncoding(Ivar ivar)
4159 if (!ivar) return nil;
4165 const char *property_getName(objc_property_t prop)
4170 const char *property_getAttributes(objc_property_t prop)
4172 return prop->attributes;
4175 objc_property_attribute_t *property_copyAttributeList(objc_property_t prop,
4176 unsigned int *outCount)
4179 if (outCount) *outCount = 0;
4183 mutex_locker_t lock(runtimeLock);
4184 return copyPropertyAttributeList(prop->attributes,outCount);
4187 char * property_copyAttributeValue(objc_property_t prop, const char *name)
4189 if (!prop || !name || *name == '\0') return nil;
4191 mutex_locker_t lock(runtimeLock);
4192 return copyPropertyAttributeValue(prop->attributes, name);
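// Usage sketch (illustrative only): reading a property's raw attribute
// string and one decoded attribute with the public API. The class name
// "MyModel" and the property "title" are hypothetical.
#if 0
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dumpPropertyExample(void)
{
    objc_property_t prop = class_getProperty(objc_getClass("MyModel"), "title");
    if (!prop) return;
    printf("%s -> %s\n", property_getName(prop), property_getAttributes(prop));
    // "T" is the type attribute, e.g. "@\"NSString\"" for an object property.
    char *type = property_copyAttributeValue(prop, "T");
    if (type) {
        printf("type encoding: %s\n", type);
        free(type);   // caller owns the copied attribute value
    }
}
#endif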
4196 /***********************************************************************
4197 * getExtendedTypesIndexesForMethod
4199 * a is the count of methods in all method lists before m's method list
4200 * b is the index of m in m's method list
4201 * a+b is the index of m's extended types in the extended types array
4202 **********************************************************************/
4203 static void getExtendedTypesIndexesForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod, uint32_t& a, uint32_t &b)
4207 if (proto->instanceMethods) {
4208 if (isRequiredMethod && isInstanceMethod) {
4209 b = proto->instanceMethods->indexOfMethod(m);
4212 a += proto->instanceMethods->count;
4215 if (proto->classMethods) {
4216 if (isRequiredMethod && !isInstanceMethod) {
4217 b = proto->classMethods->indexOfMethod(m);
4220 a += proto->classMethods->count;
4223 if (proto->optionalInstanceMethods) {
4224 if (!isRequiredMethod && isInstanceMethod) {
4225 b = proto->optionalInstanceMethods->indexOfMethod(m);
4228 a += proto->optionalInstanceMethods->count;
4231 if (proto->optionalClassMethods) {
4232 if (!isRequiredMethod && !isInstanceMethod) {
4233 b = proto->optionalClassMethods->indexOfMethod(m);
4236 a += proto->optionalClassMethods->count;
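// Worked example (hypothetical protocol with 3 required instance methods,
// 2 required class methods, and several optional instance methods): for the
// optional instance method at index 1 of its list, a = 3 + 2 = 5 and b = 1,
// so its extended types live at index a + b = 6 of the extended types array.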
4241 /***********************************************************************
4242 * getExtendedTypesIndexForMethod
4243 * Returns the index of m's extended types in proto's extended types array.
4244 **********************************************************************/
4245 static uint32_t getExtendedTypesIndexForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod)
4249 getExtendedTypesIndexesForMethod(proto, m, isRequiredMethod,
4250 isInstanceMethod, a, b);
4255 /***********************************************************************
4256 * fixupProtocolMethodList
4257 * Fixes up a single method list in a protocol.
4258 **********************************************************************/
4260 fixupProtocolMethodList(protocol_t *proto, method_list_t *mlist,
4261 bool required, bool instance)
4263 runtimeLock.assertLocked();
4266 if (mlist->isFixedUp()) return;
4268 const char **extTypes = proto->extendedMethodTypes();
4269 fixupMethodList(mlist, true/*always copy for simplicity*/,
4270 !extTypes/*sort if no extended method types*/);
4272 if (extTypes && !mlist->isSmallList()) {
4273 // Sort method list and extended method types together.
4274 // fixupMethodList() can't do this.
4276 uint32_t count = mlist->count;
4279 getExtendedTypesIndexesForMethod(proto, &mlist->get(0),
4280 required, instance, prefix, junk);
4281 for (uint32_t i = 0; i < count; i++) {
4282 for (uint32_t j = i+1; j < count; j++) {
4283 auto& mi = mlist->get(i).big();
4284 auto& mj = mlist->get(j).big();
4285 if (mi.name > mj.name) {
4287 std::swap(extTypes[prefix+i], extTypes[prefix+j]);
4295 /***********************************************************************
4296 * fixupProtocol
4297 * Fixes up all of a protocol's method lists.
4298 **********************************************************************/
4300 fixupProtocol(protocol_t *proto)
4302 runtimeLock.assertLocked();
4304 if (proto->protocols) {
4305 for (uintptr_t i = 0; i < proto->protocols->count; i++) {
4306 protocol_t *sub = remapProtocol(proto->protocols->list[i]);
4307 if (!sub->isFixedUp()) fixupProtocol(sub);
4311 fixupProtocolMethodList(proto, proto->instanceMethods, YES, YES);
4312 fixupProtocolMethodList(proto, proto->classMethods, YES, NO);
4313 fixupProtocolMethodList(proto, proto->optionalInstanceMethods, NO, YES);
4314 fixupProtocolMethodList(proto, proto->optionalClassMethods, NO, NO);
4316 // fixme memory barrier so we can check this with no lock
4317 proto->setFixedUp();
4321 /***********************************************************************
4322 * fixupProtocolIfNeeded
4323 * Fixes up all of a protocol's method lists if they aren't fixed up already.
4324 * Locking: write-locks runtimeLock.
4325 **********************************************************************/
4327 fixupProtocolIfNeeded(protocol_t *proto)
4329 runtimeLock.assertUnlocked();
4332 if (!proto->isFixedUp()) {
4333 mutex_locker_t lock(runtimeLock);
4334 fixupProtocol(proto);
4339 static method_list_t *
4340 getProtocolMethodList(protocol_t *proto, bool required, bool instance)
4342 method_list_t **mlistp = nil;
4345 mlistp = &proto->instanceMethods;
4347 mlistp = &proto->classMethods;
4351 mlistp = &proto->optionalInstanceMethods;
4353 mlistp = &proto->optionalClassMethods;
4361 /***********************************************************************
4362 * protocol_getMethod_nolock
4363 * Locking: runtimeLock must be held by the caller
4364 **********************************************************************/
4366 protocol_getMethod_nolock(protocol_t *proto, SEL sel,
4367 bool isRequiredMethod, bool isInstanceMethod,
4370 runtimeLock.assertLocked();
4372 if (!proto || !sel) return nil;
4374 ASSERT(proto->isFixedUp());
4376 method_list_t *mlist =
4377 getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
4379 method_t *m = search_method_list(mlist, sel);
4383 if (recursive && proto->protocols) {
4385 for (uint32_t i = 0; i < proto->protocols->count; i++) {
4386 protocol_t *realProto = remapProtocol(proto->protocols->list[i]);
4387 m = protocol_getMethod_nolock(realProto, sel,
4388 isRequiredMethod, isInstanceMethod,
4398 /***********************************************************************
4399 * protocol_getMethod
4401 * Locking: acquires runtimeLock
4402 **********************************************************************/
4404 protocol_getMethod(protocol_t *proto, SEL sel, bool isRequiredMethod, bool isInstanceMethod, bool recursive)
4406 if (!proto) return nil;
4407 fixupProtocolIfNeeded(proto);
4409 mutex_locker_t lock(runtimeLock);
4410 return protocol_getMethod_nolock(proto, sel, isRequiredMethod,
4411 isInstanceMethod, recursive);
4415 /***********************************************************************
4416 * protocol_getMethodTypeEncoding_nolock
4417 * Return the @encode string for the requested protocol method.
4418 * Returns nil if the compiler did not emit any extended @encode data.
4419 * Locking: runtimeLock must be held by the caller
4420 **********************************************************************/
4422 protocol_getMethodTypeEncoding_nolock(protocol_t *proto, SEL sel,
4423 bool isRequiredMethod,
4424 bool isInstanceMethod)
4426 runtimeLock.assertLocked();
4428 if (!proto) return nil;
4429 if (!proto->extendedMethodTypes()) return nil;
4431 ASSERT(proto->isFixedUp());
4434 protocol_getMethod_nolock(proto, sel,
4435 isRequiredMethod, isInstanceMethod, false);
4437 uint32_t i = getExtendedTypesIndexForMethod(proto, m,
4440 return proto->extendedMethodTypes()[i];
4443 // No method with that name. Search incorporated protocols.
4444 if (proto->protocols) {
4445 for (uintptr_t i = 0; i < proto->protocols->count; i++) {
4447 protocol_getMethodTypeEncoding_nolock(remapProtocol(proto->protocols->list[i]), sel, isRequiredMethod, isInstanceMethod);
4448 if (enc) return enc;
4455 /***********************************************************************
4456 * _protocol_getMethodTypeEncoding
4457 * Return the @encode string for the requested protocol method.
4458 * Returns nil if the compiler did not emit any extended @encode data.
4459 * Locking: acquires runtimeLock
4460 **********************************************************************/
4462 _protocol_getMethodTypeEncoding(Protocol *proto_gen, SEL sel,
4463 BOOL isRequiredMethod, BOOL isInstanceMethod)
4465 protocol_t *proto = newprotocol(proto_gen);
4467 if (!proto) return nil;
4468 fixupProtocolIfNeeded(proto);
4470 mutex_locker_t lock(runtimeLock);
4471 return protocol_getMethodTypeEncoding_nolock(proto, sel,
4477 /***********************************************************************
4478 * protocol_t::demangledName
4479 * Returns the (Swift-demangled) name of the given protocol.
4481 **********************************************************************/
4483 protocol_t::demangledName()
4485 if (!hasDemangledNameField())
4488 if (! _demangledName) {
4489 char *de = copySwiftV1DemangledName(mangledName, true/*isProtocol*/);
4490 if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangledName),
4491 (void**)&_demangledName))
4496 return _demangledName;
4499 /***********************************************************************
4500 * protocol_getName
4501 * Returns the (Swift-demangled) name of the given protocol.
4502 * Locking: runtimeLock must not be held by the caller
4503 **********************************************************************/
4505 protocol_getName(Protocol *proto)
4507 if (!proto) return "nil";
4508 else return newprotocol(proto)->demangledName();
4512 /***********************************************************************
4513 * protocol_getMethodDescription
4514 * Returns the description of a named method (required or optional, instance or class).
4515 * Locking: runtimeLock must not be held by the caller
4516 **********************************************************************/
4517 struct objc_method_description
4518 protocol_getMethodDescription(Protocol *p, SEL aSel,
4519 BOOL isRequiredMethod, BOOL isInstanceMethod)
4522 protocol_getMethod(newprotocol(p), aSel,
4523 isRequiredMethod, isInstanceMethod, true);
4524 // method_getDescription is inefficient for small methods. Don't bother
4525 // trying to use it, just make our own.
4526 if (m) return (struct objc_method_description){m->name(), (char *)m->types()};
4527 else return (struct objc_method_description){nil, nil};
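// Usage sketch (not compiled here): looking up a required instance method
// description on a protocol via the public API. This assumes the NSObject
// protocol has been loaded (e.g. via Foundation) so objc_getProtocol finds it.
#if 0
#include <objc/runtime.h>
#include <stdio.h>

static void protocolDescriptionExample(void)
{
    Protocol *proto = objc_getProtocol("NSObject");
    struct objc_method_description d =
        protocol_getMethodDescription(proto, sel_registerName("description"),
                                      YES/*required*/, YES/*instance*/);
    if (d.name) {
        printf("%s has type encoding %s\n", sel_getName(d.name), d.types);
    }
}
#endif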
4531 /***********************************************************************
4532 * protocol_conformsToProtocol_nolock
4533 * Returns YES if self conforms to other.
4534 * Locking: runtimeLock must be held by the caller.
4535 **********************************************************************/
4537 protocol_conformsToProtocol_nolock(protocol_t *self, protocol_t *other)
4539 runtimeLock.assertLocked();
4541 if (!self || !other) {
4545 // protocols need not be fixed up
4547 if (0 == strcmp(self->mangledName, other->mangledName)) {
4551 if (self->protocols) {
4553 for (i = 0; i < self->protocols->count; i++) {
4554 protocol_t *proto = remapProtocol(self->protocols->list[i]);
4555 if (other == proto) {
4558 if (0 == strcmp(other->mangledName, proto->mangledName)) {
4561 if (protocol_conformsToProtocol_nolock(proto, other)) {
4571 /***********************************************************************
4572 * protocol_conformsToProtocol
4573 * Returns YES if self conforms to other.
4574 * Locking: acquires runtimeLock
4575 **********************************************************************/
4576 BOOL protocol_conformsToProtocol(Protocol *self, Protocol *other)
4578 mutex_locker_t lock(runtimeLock);
4579 return protocol_conformsToProtocol_nolock(newprotocol(self),
4580 newprotocol(other));
4584 /***********************************************************************
4585 * protocol_isEqual
4586 * Returns YES if two protocols are equal (i.e. conform to each other).
4587 * Locking: acquires runtimeLock
4588 **********************************************************************/
4589 BOOL protocol_isEqual(Protocol *self, Protocol *other)
4591 if (self == other) return YES;
4592 if (!self || !other) return NO;
4594 if (!protocol_conformsToProtocol(self, other)) return NO;
4595 if (!protocol_conformsToProtocol(other, self)) return NO;
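// Usage sketch (illustrative): protocol_conformsToProtocol() follows the
// incorporated-protocol graph. Assume hypothetical declarations
// @protocol Base @end and @protocol Derived <Base> @end that are adopted
// somewhere, so the compiler emits their metadata.
#if 0
#include <objc/runtime.h>

static void conformanceExample(void)
{
    Protocol *base = objc_getProtocol("Base");
    Protocol *derived = objc_getProtocol("Derived");
    BOOL a = protocol_conformsToProtocol(derived, base);  // YES: Derived <Base>
    BOOL b = protocol_conformsToProtocol(base, derived);  // NO
    BOOL c = protocol_isEqual(base, base);                // YES
    (void)a; (void)b; (void)c;
}
#endif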
4601 /***********************************************************************
4602 * protocol_copyMethodDescriptionList
4603 * Returns descriptions of a protocol's methods.
4604 * Locking: acquires runtimeLock
4605 **********************************************************************/
4606 struct objc_method_description *
4607 protocol_copyMethodDescriptionList(Protocol *p,
4608 BOOL isRequiredMethod,BOOL isInstanceMethod,
4609 unsigned int *outCount)
4611 protocol_t *proto = newprotocol(p);
4612 struct objc_method_description *result = nil;
4613 unsigned int count = 0;
4616 if (outCount) *outCount = 0;
4620 fixupProtocolIfNeeded(proto);
4622 mutex_locker_t lock(runtimeLock);
4624 method_list_t *mlist =
4625 getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
4628 result = (struct objc_method_description *)
4629 calloc(mlist->count + 1, sizeof(struct objc_method_description));
4630 for (const auto& meth : *mlist) {
4631 result[count].name = meth.name();
4632 result[count].types = (char *)meth.types();
4637 if (outCount) *outCount = count;
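// Usage sketch (not compiled here): enumerating a protocol's required
// instance method descriptions. The returned block is malloc'd and must be
// freed by the caller. NSObject is used only as a familiar, loaded protocol.
#if 0
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void listProtocolMethodsExample(void)
{
    unsigned int count = 0;
    struct objc_method_description *descs =
        protocol_copyMethodDescriptionList(objc_getProtocol("NSObject"),
                                           YES/*required*/, YES/*instance*/,
                                           &count);
    for (unsigned int i = 0; i < count; i++) {
        printf("%s %s\n", sel_getName(descs[i].name), descs[i].types);
    }
    free(descs);
}
#endif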
4642 /***********************************************************************
4643 * protocol_getProperty
4645 * Locking: runtimeLock must be held by the caller
4646 **********************************************************************/
4648 protocol_getProperty_nolock(protocol_t *proto, const char *name,
4649 bool isRequiredProperty, bool isInstanceProperty)
4651 runtimeLock.assertLocked();
4653 if (!isRequiredProperty) {
4654 // Only required properties are currently supported.
4658 property_list_t *plist = isInstanceProperty ?
4659 proto->instanceProperties : proto->classProperties();
4661 for (auto& prop : *plist) {
4662 if (0 == strcmp(name, prop.name)) {
4668 if (proto->protocols) {
4670 for (i = 0; i < proto->protocols->count; i++) {
4671 protocol_t *p = remapProtocol(proto->protocols->list[i]);
4673 protocol_getProperty_nolock(p, name,
4675 isInstanceProperty);
4676 if (prop) return prop;
4683 objc_property_t protocol_getProperty(Protocol *p, const char *name,
4684 BOOL isRequiredProperty, BOOL isInstanceProperty)
4686 if (!p || !name) return nil;
4688 mutex_locker_t lock(runtimeLock);
4689 return (objc_property_t)
4690 protocol_getProperty_nolock(newprotocol(p), name,
4691 isRequiredProperty, isInstanceProperty);
4695 /***********************************************************************
4696 * protocol_copyPropertyList
4697 * protocol_copyPropertyList2
4699 * Locking: acquires runtimeLock
4700 **********************************************************************/
4701 static property_t **
4702 copyPropertyList(property_list_t *plist, unsigned int *outCount)
4704 property_t **result = nil;
4705 unsigned int count = 0;
4708 count = plist->count;
4712 result = (property_t **)malloc((count+1) * sizeof(property_t *));
4715 for (auto& prop : *plist) {
4716 result[count++] = ∝
4718 result[count] = nil;
4721 if (outCount) *outCount = count;
4726 protocol_copyPropertyList2(Protocol *proto, unsigned int *outCount,
4727 BOOL isRequiredProperty, BOOL isInstanceProperty)
4729 if (!proto || !isRequiredProperty) {
4730 // Optional properties are not currently supported.
4731 if (outCount) *outCount = 0;
4735 mutex_locker_t lock(runtimeLock);
4737 property_list_t *plist = isInstanceProperty
4738 ? newprotocol(proto)->instanceProperties
4739 : newprotocol(proto)->classProperties();
4740 return (objc_property_t *)copyPropertyList(plist, outCount);
4744 protocol_copyPropertyList(Protocol *proto, unsigned int *outCount)
4746 return protocol_copyPropertyList2(proto, outCount,
4747 YES/*required*/, YES/*instance*/);
4751 /***********************************************************************
4752 * protocol_copyProtocolList
4753 * Copies this protocol's incorporated protocols.
4754 * Does not copy those protocols' incorporated protocols in turn.
4755 * Locking: acquires runtimeLock
4756 **********************************************************************/
4757 Protocol * __unsafe_unretained *
4758 protocol_copyProtocolList(Protocol *p, unsigned int *outCount)
4760 unsigned int count = 0;
4761 Protocol **result = nil;
4762 protocol_t *proto = newprotocol(p);
4765 if (outCount) *outCount = 0;
4769 mutex_locker_t lock(runtimeLock);
4771 if (proto->protocols) {
4772 count = (unsigned int)proto->protocols->count;
4775 result = (Protocol **)malloc((count+1) * sizeof(Protocol *));
4778 for (i = 0; i < count; i++) {
4779 result[i] = (Protocol *)remapProtocol(proto->protocols->list[i]);
4784 if (outCount) *outCount = count;
4789 /***********************************************************************
4790 * objc_allocateProtocol
4791 * Creates a new protocol. The protocol may not be used until
4792 * objc_registerProtocol() is called.
4793 * Returns nil if a protocol with the same name already exists.
4794 * Locking: acquires runtimeLock
4795 **********************************************************************/
4797 objc_allocateProtocol(const char *name)
4799 mutex_locker_t lock(runtimeLock);
4801 if (getProtocol(name)) {
4805 protocol_t *result = (protocol_t *)calloc(sizeof(protocol_t), 1);
4807 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4808 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4809 result->initProtocolIsa(cls);
4810 result->size = sizeof(protocol_t);
4811 // fixme mangle the name if it looks swift-y?
4812 result->mangledName = strdupIfMutable(name);
4814 // fixme reserve name without installing
4816 return (Protocol *)result;
4820 /***********************************************************************
4821 * objc_registerProtocol
4822 * Registers a newly-constructed protocol. The protocol is now
4823 * ready for use and immutable.
4824 * Locking: acquires runtimeLock
4825 **********************************************************************/
4826 void objc_registerProtocol(Protocol *proto_gen)
4828 protocol_t *proto = newprotocol(proto_gen);
4830 mutex_locker_t lock(runtimeLock);
4832 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4833 Class oldcls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4834 extern objc_class OBJC_CLASS_$_Protocol;
4835 Class cls = (Class)&OBJC_CLASS_$_Protocol;
4837 if (proto->ISA() == cls) {
4838 _objc_inform("objc_registerProtocol: protocol '%s' was already "
4839 "registered!", proto->nameForLogging());
4842 if (proto->ISA() != oldcls) {
4843 _objc_inform("objc_registerProtocol: protocol '%s' was not allocated "
4844 "with objc_allocateProtocol!", proto->nameForLogging());
4848 // NOT initProtocolIsa(). The protocol object may already
4849 // have been retained and we must preserve that count.
4850 proto->changeIsa(cls);
4852 // Don't add this protocol if we already have it.
4853 // Should we warn on duplicates?
4854 if (getProtocol(proto->mangledName) == nil) {
4855 NXMapKeyCopyingInsert(protocols(), proto->mangledName, proto);
4860 /***********************************************************************
4861 * protocol_addProtocol
4862 * Adds an incorporated protocol to another protocol.
4863 * No method enforcement is performed.
4864 * `proto` must be under construction. `addition` must not.
4865 * Locking: acquires runtimeLock
4866 **********************************************************************/
4868 protocol_addProtocol(Protocol *proto_gen, Protocol *addition_gen)
4870 protocol_t *proto = newprotocol(proto_gen);
4871 protocol_t *addition = newprotocol(addition_gen);
4873 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4874 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4876 if (!proto_gen) return;
4877 if (!addition_gen) return;
4879 mutex_locker_t lock(runtimeLock);
4881 if (proto->ISA() != cls) {
4882 _objc_inform("protocol_addProtocol: modified protocol '%s' is not "
4883 "under construction!", proto->nameForLogging());
4886 if (addition->ISA() == cls) {
4887 _objc_inform("protocol_addProtocol: added protocol '%s' is still "
4888 "under construction!", addition->nameForLogging());
4892 protocol_list_t *protolist = proto->protocols;
4894 protolist = (protocol_list_t *)
4895 calloc(1, sizeof(protocol_list_t)
4896 + sizeof(protolist->list[0]));
4898 protolist = (protocol_list_t *)
4899 realloc(protolist, protocol_list_size(protolist)
4900 + sizeof(protolist->list[0]));
4903 protolist->list[protolist->count++] = (protocol_ref_t)addition;
4904 proto->protocols = protolist;
4908 /***********************************************************************
4909 * protocol_addMethodDescription
4910 * Adds a method to a protocol. The protocol must be under construction.
4911 * Locking: acquires runtimeLock
4912 **********************************************************************/
4914 protocol_addMethod_nolock(method_list_t*& list, SEL name, const char *types)
4917 list = (method_list_t *)calloc(method_list_t::byteSize(sizeof(struct method_t::big), 1), 1);
4918 list->entsizeAndFlags = sizeof(struct method_t::big);
4921 size_t size = list->byteSize() + list->entsize();
4922 list = (method_list_t *)realloc(list, size);
4925 auto &meth = list->get(list->count++).big();
4927 meth.types = types ? strdupIfMutable(types) : "";
4932 protocol_addMethodDescription(Protocol *proto_gen, SEL name, const char *types,
4933 BOOL isRequiredMethod, BOOL isInstanceMethod)
4935 protocol_t *proto = newprotocol(proto_gen);
4937 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4938 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
4940 if (!proto_gen) return;
4942 mutex_locker_t lock(runtimeLock);
4944 if (proto->ISA() != cls) {
4945 _objc_inform("protocol_addMethodDescription: protocol '%s' is not "
4946 "under construction!", proto->nameForLogging());
4950 if (isRequiredMethod && isInstanceMethod) {
4951 protocol_addMethod_nolock(proto->instanceMethods, name, types);
4952 } else if (isRequiredMethod && !isInstanceMethod) {
4953 protocol_addMethod_nolock(proto->classMethods, name, types);
4954 } else if (!isRequiredMethod && isInstanceMethod) {
4955 protocol_addMethod_nolock(proto->optionalInstanceMethods, name,types);
4956 } else /* !isRequiredMethod && !isInstanceMethod) */ {
4957 protocol_addMethod_nolock(proto->optionalClassMethods, name, types);
4962 /***********************************************************************
4963 * protocol_addProperty
4964 * Adds a property to a protocol. The protocol must be under construction.
4965 * Locking: acquires runtimeLock
4966 **********************************************************************/
4968 protocol_addProperty_nolock(property_list_t *&plist, const char *name,
4969 const objc_property_attribute_t *attrs,
4973 plist = (property_list_t *)calloc(property_list_t::byteSize(sizeof(property_t), 1), 1);
4974 plist->entsizeAndFlags = sizeof(property_t);
4978 plist = (property_list_t *)realloc(plist, plist->byteSize());
4981 property_t& prop = plist->get(plist->count - 1);
4982 prop.name = strdupIfMutable(name);
4983 prop.attributes = copyPropertyAttributeString(attrs, count);
4987 protocol_addProperty(Protocol *proto_gen, const char *name,
4988 const objc_property_attribute_t *attrs,
4990 BOOL isRequiredProperty, BOOL isInstanceProperty)
4992 protocol_t *proto = newprotocol(proto_gen);
4994 extern objc_class OBJC_CLASS_$___IncompleteProtocol;
4995 Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
5000 mutex_locker_t lock(runtimeLock);
5002 if (proto->ISA() != cls) {
5003 _objc_inform("protocol_addProperty: protocol '%s' is not "
5004 "under construction!", proto->nameForLogging());
5008 if (isRequiredProperty && isInstanceProperty) {
5009 protocol_addProperty_nolock(proto->instanceProperties, name, attrs, count);
5011 else if (isRequiredProperty && !isInstanceProperty) {
5012 protocol_addProperty_nolock(proto->_classProperties, name, attrs, count);
5014 //else if (!isRequiredProperty && isInstanceProperty) {
5015 // protocol_addProperty_nolock(proto->optionalInstanceProperties, name, attrs, count);
5017 //else /* !isRequiredProperty && !isInstanceProperty) */ {
5018 // protocol_addProperty_nolock(proto->optionalClassProperties, name, attrs, count);
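// Usage sketch (illustrative only): building and registering a protocol at
// runtime with the constructor API above. Members must be added before
// objc_registerProtocol(), after which the protocol is immutable. The name
// "MyRuntimeProto" and its members are hypothetical; "v16@0:8" is the 64-bit
// encoding of a method taking no arguments and returning void.
#if 0
#include <objc/runtime.h>

static Protocol *buildProtocolExample(void)
{
    Protocol *proto = objc_allocateProtocol("MyRuntimeProto");
    if (!proto) return objc_getProtocol("MyRuntimeProto");  // already exists

    // Required instance method: - (void)refresh;
    protocol_addMethodDescription(proto, sel_registerName("refresh"),
                                  "v16@0:8", YES/*required*/, YES/*instance*/);

    // Required instance property: @property(nonatomic, copy) NSString *name;
    objc_property_attribute_t attrs[] = {
        { "T", "@\"NSString\"" }, { "C", "" }, { "N", "" }
    };
    protocol_addProperty(proto, "name", attrs, 3,
                         YES/*required*/, YES/*instance*/);

    // Incorporate an already-registered protocol, then freeze this one.
    protocol_addProtocol(proto, objc_getProtocol("NSObject"));
    objc_registerProtocol(proto);
    return proto;
}
#endif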
5023 objc_getRealizedClassList_nolock(Class *buffer, int bufferLen)
5029 foreach_realized_class([=, &count, &c](Class cls) {
5031 if (c < bufferLen) {
5037 foreach_realized_class([&count](Class cls) {
5047 objc_copyRealizedClassList_nolock(unsigned int *outCount)
5049 Class *result = nil;
5050 unsigned int count = 0;
5052 foreach_realized_class([&count](Class cls) {
5060 result = (Class *)malloc((1+count) * sizeof(Class));
5061 foreach_realized_class([=, &c](Class cls) {
5068 if (outCount) *outCount = count;
5072 /***********************************************************************
5073 * objc_getClassList
5074 * Returns pointers to all classes.
5075 * This requires all classes be realized, which is regrettably non-lazy.
5076 * Locking: acquires runtimeLock
5077 **********************************************************************/
5079 objc_getClassList(Class *buffer, int bufferLen)
5081 mutex_locker_t lock(runtimeLock);
5083 realizeAllClasses();
5085 return objc_getRealizedClassList_nolock(buffer, bufferLen);
5088 /***********************************************************************
5089 * objc_copyRealizedClassList
5090 * Returns pointers to all currently realized classes.
5092 * outCount may be nil. *outCount is the number of classes returned.
5093 * If the returned array is not nil, it is nil-terminated and must be
5094 * freed with free().
5095 * Locking: write-locks runtimeLock
5096 **********************************************************************/
5098 objc_copyRealizedClassList(unsigned int *outCount)
5100 mutex_locker_t lock(runtimeLock);
5102 return objc_copyRealizedClassList_nolock(outCount);
5106 /***********************************************************************
5107 * objc_copyClassList
5108 * Returns pointers to all classes.
5109 * This requires all classes be realized, which is regrettably non-lazy.
5111 * outCount may be nil. *outCount is the number of classes returned.
5112 * If the returned array is not nil, it is nil-terminated and must be
5113 * freed with free().
5114 * Locking: write-locks runtimeLock
5115 **********************************************************************/
5117 objc_copyClassList(unsigned int *outCount)
5119 mutex_locker_t lock(runtimeLock);
5121 realizeAllClasses();
5123 return objc_copyRealizedClassList_nolock(outCount);
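// Usage sketch (not part of the runtime): walking every class with
// objc_copyClassList(). The returned buffer is malloc'd, nil-terminated,
// and must be freed by the caller; note that this realizes every class.
#if 0
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void listAllClassesExample(void)
{
    unsigned int count = 0;
    Class *classes = objc_copyClassList(&count);
    for (unsigned int i = 0; i < count; i++) {
        printf("%s\n", class_getName(classes[i]));
    }
    free(classes);
}
#endif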
5126 /***********************************************************************
5127 * class_copyImpCache
5128 * Returns the current content of the Class IMP Cache
5130 * outCount may be nil. *outCount is the number of entries returned.
5131 * If the returned array is not nil, it is nil-terminated and must be
5132 * freed with free().
5133 * Locking: write-locks cacheUpdateLock
5134 **********************************************************************/
5135 objc_imp_cache_entry *
5136 class_copyImpCache(Class cls, int *outCount)
5138 objc_imp_cache_entry *buffer = nullptr;
5140 #if CONFIG_USE_CACHE_LOCK
5141 mutex_locker_t lock(cacheUpdateLock);
5143 mutex_locker_t lock(runtimeLock);
5146 cache_t &cache = cls->cache;
5147 int count = (int)cache.occupied();
5150 buffer = (objc_imp_cache_entry *)calloc(1+count, sizeof(objc_imp_cache_entry));
5151 cache.copyCacheNolock(buffer, count);
5154 if (outCount) *outCount = count;
5159 /***********************************************************************
5160 * objc_copyProtocolList
5161 * Returns pointers to all protocols.
5162 * Locking: read-locks runtimeLock
5163 **********************************************************************/
5164 Protocol * __unsafe_unretained *
5165 objc_copyProtocolList(unsigned int *outCount)
5167 mutex_locker_t lock(runtimeLock);
5169 NXMapTable *protocol_map = protocols();
5171 // Find all the protocols from the pre-optimized images. These protocols
5172 // won't be in the protocol map.
5173 objc::DenseMap<const char*, Protocol*> preoptimizedProtocols;
5176 for (hi = FirstHeader; hi; hi = hi->getNext()) {
5177 if (!hi->hasPreoptimizedProtocols())
5181 const protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count);
5182 for (i = 0; i < count; i++) {
5183 const protocol_t* protocol = protolist[i];
5185 // Skip protocols we have in the runtime map. These likely
5186 // correspond to protocols added dynamically which have the same
5187 // name as a protocol found later in a dlopen'ed shared cache image.
5188 if (NXMapGet(protocol_map, protocol->mangledName) != nil)
5191 // The protocols in the shared cache protolist point to their
5192 // original on-disk object, not the optimized one. We can use the name
5193 // to find the optimized one.
5194 Protocol* optimizedProto = getPreoptimizedProtocol(protocol->mangledName);
5195 preoptimizedProtocols.insert({ protocol->mangledName, optimizedProto });
5200 unsigned int count = NXCountMapTable(protocol_map) + (unsigned int)preoptimizedProtocols.size();
5202 if (outCount) *outCount = 0;
5206 Protocol **result = (Protocol **)malloc((count+1) * sizeof(Protocol*));
5211 NXMapState state = NXInitMapState(protocol_map);
5212 while (NXNextMapState(protocol_map, &state,
5213 (const void **)&name, (const void **)&proto))
5215 result[i++] = proto;
5218 // Add any protocols found in the pre-optimized table
5219 for (auto it : preoptimizedProtocols) {
5220 result[i++] = it.second;
5224 ASSERT(i == count+1);
5226 if (outCount) *outCount = count;
5231 /***********************************************************************
5232 * objc_getProtocol
5233 * Get a protocol by name, or return nil.
5234 * Locking: read-locks runtimeLock
5235 **********************************************************************/
5236 Protocol *objc_getProtocol(const char *name)
5238 mutex_locker_t lock(runtimeLock);
5239 return getProtocol(name);
5243 /***********************************************************************
5244 * class_copyMethodList
5246 * Locking: read-locks runtimeLock
5247 **********************************************************************/
5249 class_copyMethodList(Class cls, unsigned int *outCount)
5251 unsigned int count = 0;
5252 Method *result = nil;
5255 if (outCount) *outCount = 0;
5259 mutex_locker_t lock(runtimeLock);
5260 const auto methods = cls->data()->methods();
5262 ASSERT(cls->isRealized());
5264 count = methods.count();
5267 result = (Method *)malloc((count + 1) * sizeof(Method));
5270 for (auto& meth : methods) {
5271 result[count++] = &meth;
5273 result[count] = nil;
5276 if (outCount) *outCount = count;
5281 /***********************************************************************
5282 * class_copyIvarList
5284 * Locking: read-locks runtimeLock
5285 **********************************************************************/
5287 class_copyIvarList(Class cls, unsigned int *outCount)
5289 const ivar_list_t *ivars;
5291 unsigned int count = 0;
5294 if (outCount) *outCount = 0;
5298 mutex_locker_t lock(runtimeLock);
5300 ASSERT(cls->isRealized());
5302 if ((ivars = cls->data()->ro()->ivars) && ivars->count) {
5303 result = (Ivar *)malloc((ivars->count+1) * sizeof(Ivar));
5305 for (auto& ivar : *ivars) {
5306 if (!ivar.offset) continue; // anonymous bitfield
5307 result[count++] = &ivar;
5309 result[count] = nil;
5312 if (outCount) *outCount = count;
5317 /***********************************************************************
5318 * class_copyPropertyList. Returns a heap block containing the
5319 * properties declared in the class, or nil if the class
5320 * declares no properties. Caller must free the block.
5321 * Does not copy any superclass's properties.
5322 * Locking: read-locks runtimeLock
5323 **********************************************************************/
5325 class_copyPropertyList(Class cls, unsigned int *outCount)
5328 if (outCount) *outCount = 0;
5332 mutex_locker_t lock(runtimeLock);
5334 checkIsKnownClass(cls);
5335 ASSERT(cls->isRealized());
5337 auto rw = cls->data();
5339 property_t **result = nil;
5340 auto const properties = rw->properties();
5341 unsigned int count = properties.count();
5343 result = (property_t **)malloc((count + 1) * sizeof(property_t *));
5346 for (auto& prop : properties) {
5347 result[count++] = ∝
5349 result[count] = nil;
5352 if (outCount) *outCount = count;
5353 return (objc_property_t *)result;
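// Usage sketch (illustrative): dumping a class's methods, ivars, and
// properties with the copy functions above. Every returned block is
// malloc'd and must be freed by the caller; the class "MyModel" is
// hypothetical.
#if 0
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void dumpClassExample(void)
{
    Class cls = objc_getClass("MyModel");
    unsigned int n = 0;

    Method *methods = class_copyMethodList(cls, &n);
    for (unsigned int i = 0; i < n; i++)
        printf("method %s\n", sel_getName(method_getName(methods[i])));
    free(methods);

    Ivar *ivars = class_copyIvarList(cls, &n);
    for (unsigned int i = 0; i < n; i++)
        printf("ivar %s at offset %td\n",
               ivar_getName(ivars[i]), ivar_getOffset(ivars[i]));
    free(ivars);

    objc_property_t *props = class_copyPropertyList(cls, &n);
    for (unsigned int i = 0; i < n; i++)
        printf("property %s (%s)\n",
               property_getName(props[i]), property_getAttributes(props[i]));
    free(props);
}
#endif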
5357 /***********************************************************************
5358 * objc_class::getLoadMethod
5360 * Called only from add_class_to_loadable_list.
5361 * Locking: runtimeLock must be read- or write-locked by the caller.
5362 **********************************************************************/
5364 objc_class::getLoadMethod()
5366 runtimeLock.assertLocked();
5368 const method_list_t *mlist;
5370 ASSERT(isRealized());
5371 ASSERT(ISA()->isRealized());
5372 ASSERT(!isMetaClass());
5373 ASSERT(ISA()->isMetaClass());
5375 mlist = ISA()->data()->ro()->baseMethods();
5377 for (const auto& meth : *mlist) {
5378 const char *name = sel_cname(meth.name());
5379 if (0 == strcmp(name, "load")) {
5380 return meth.imp(false);
5389 /***********************************************************************
5390 * _category_getName
5391 * Returns a category's name.
5393 **********************************************************************/
5395 _category_getName(Category cat)
5401 /***********************************************************************
5402 * _category_getClassName
5403 * Returns a category's class's name
5404 * Called only from add_category_to_loadable_list and
5405 * remove_category_from_loadable_list for logging purposes.
5406 * Locking: runtimeLock must be read- or write-locked by the caller
5407 **********************************************************************/
5409 _category_getClassName(Category cat)
5411 runtimeLock.assertLocked();
5412 return remapClass(cat->cls)->nameForLogging();
5416 /***********************************************************************
5417 * _category_getClass
5418 * Returns a category's class
5419 * Called only by call_category_loads.
5420 * Locking: read-locks runtimeLock
5421 **********************************************************************/
5423 _category_getClass(Category cat)
5425 mutex_locker_t lock(runtimeLock);
5426 Class result = remapClass(cat->cls);
5427 ASSERT(result->isRealized()); // ok for call_category_loads' usage
5432 /***********************************************************************
5433 * _category_getLoadMethod
5435 * Called only from add_category_to_loadable_list
5436 * Locking: runtimeLock must be read- or write-locked by the caller
5437 **********************************************************************/
5439 _category_getLoadMethod(Category cat)
5441 runtimeLock.assertLocked();
5443 const method_list_t *mlist;
5445 mlist = cat->classMethods;
5447 for (const auto& meth : *mlist) {
5448 const char *name = sel_cname(meth.name());
5449 if (0 == strcmp(name, "load")) {
5450 return meth.imp(false);
5459 /***********************************************************************
5460 * category_t::propertiesForMeta
5461 * Return a category's instance or class properties.
5462 * hi is the image containing the category.
5463 **********************************************************************/
5465 category_t::propertiesForMeta(bool isMeta, struct header_info *hi)
5467 if (!isMeta) return instanceProperties;
5468 else if (hi->info()->hasCategoryClassProperties()) return _classProperties;
5473 /***********************************************************************
5474 * class_copyProtocolList
5476 * Locking: read-locks runtimeLock
5477 **********************************************************************/
5478 Protocol * __unsafe_unretained *
5479 class_copyProtocolList(Class cls, unsigned int *outCount)
5481 unsigned int count = 0;
5482 Protocol **result = nil;
5485 if (outCount) *outCount = 0;
5489 mutex_locker_t lock(runtimeLock);
5490 const auto protocols = cls->data()->protocols();
5492 checkIsKnownClass(cls);
5494 ASSERT(cls->isRealized());
5496 count = protocols.count();
5499 result = (Protocol **)malloc((count+1) * sizeof(Protocol *));
5502 for (const auto& proto : protocols) {
5503 result[count++] = (Protocol *)remapProtocol(proto);
5505 result[count] = nil;
5508 if (outCount) *outCount = count;
5513 /***********************************************************************
5514 * objc_copyImageNames
5515 * Copies names of loaded images with ObjC contents.
5517 * Locking: acquires runtimeLock
5518 **********************************************************************/
5519 const char **objc_copyImageNames(unsigned int *outCount)
5521 mutex_locker_t lock(runtimeLock);
5523 int HeaderCount = 0;
5524 for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5529 const TCHAR **names = (const TCHAR **)
5530 malloc((HeaderCount+1) * sizeof(TCHAR *));
5532 const char **names = (const char **)
5533 malloc((HeaderCount+1) * sizeof(char *));
5536 unsigned int count = 0;
5537 for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5539 if (hi->moduleName) {
5540 names[count++] = hi->moduleName;
5543 const char *fname = hi->fname();
5545 names[count++] = fname;
5552 // Return nil instead of empty list if there are no images
5553 free((void *)names);
5557 if (outCount) *outCount = count;
5562 /***********************************************************************
5563 * copyClassNamesForImage_nolock
5564 * Copies class names from the given image.
5565 * Missing weak-import classes are omitted.
5566 * Swift class names are demangled.
5568 * Locking: runtimeLock must be held by the caller
5569 **********************************************************************/
5571 copyClassNamesForImage_nolock(header_info *hi, unsigned int *outCount)
5573 runtimeLock.assertLocked();
5577 classref_t const *classlist = _getObjc2ClassList(hi, &count);
5578 const char **names = (const char **)
5579 malloc((count+1) * sizeof(const char *));
5582 for (size_t i = 0; i < count; i++) {
5583 Class cls = remapClass(classlist[i]);
5585 names[i-shift] = cls->demangledName(/* needs lock */false);
5587 shift++; // ignored weak-linked class
5593 if (outCount) *outCount = (unsigned int)count;
5598 copyClassesForImage_nolock(header_info *hi, unsigned int *outCount)
5600 runtimeLock.assertLocked();
5604 classref_t const *classlist = _getObjc2ClassList(hi, &count);
5605 Class *classes = (Class *)
5606 malloc((count+1) * sizeof(Class));
5609 for (size_t i = 0; i < count; i++) {
5610 Class cls = remapClass(classlist[i]);
5612 classes[i-shift] = cls;
5614 shift++; // ignored weak-linked class
5618 classes[count] = nil;
5620 if (outCount) *outCount = (unsigned int)count;
5625 /***********************************************************************
5626 * objc_copyClassNamesForImage
5627 * Copies class names from the named image.
5628 * The image name must be identical to dladdr's dli_fname value.
5629 * Missing weak-import classes are omitted.
5630 * Swift class names are demangled.
5632 * Locking: acquires runtimeLock
5633 **********************************************************************/
5635 objc_copyClassNamesForImage(const char *image, unsigned int *outCount)
5638 if (outCount) *outCount = 0;
5642 mutex_locker_t lock(runtimeLock);
5646 for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5648 if (0 == wcscmp((TCHAR *)image, hi->moduleName)) break;
5650 if (0 == strcmp(image, hi->fname())) break;
5655 if (outCount) *outCount = 0;
5659 return copyClassNamesForImage_nolock(hi, outCount);
5663 objc_copyClassesForImage(const char *image, unsigned int *outCount)
5666 if (outCount) *outCount = 0;
5670 mutex_locker_t lock(runtimeLock);
5674 for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5675 if (0 == strcmp(image, hi->fname())) break;
5679 if (outCount) *outCount = 0;
5683 return copyClassesForImage_nolock(hi, outCount);
5686 /***********************************************************************
5687 * objc_copyClassNamesForImageHeader
5688 * Copies class names from the given image.
5689 * Missing weak-import classes are omitted.
5690 * Swift class names are demangled.
5692 * Locking: acquires runtimeLock
5693 **********************************************************************/
5695 objc_copyClassNamesForImageHeader(const struct mach_header *mh, unsigned int *outCount)
5698 if (outCount) *outCount = 0;
5702 mutex_locker_t lock(runtimeLock);
5706 for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
5707 if (hi->mhdr() == (const headerType *)mh) break;
5711 if (outCount) *outCount = 0;
5715 return copyClassNamesForImage_nolock(hi, outCount);
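// Usage sketch (illustrative): listing the loaded images with ObjC content
// and how many classes each one defines. Both returned arrays are malloc'd
// and must be freed by the caller.
#if 0
#include <objc/runtime.h>
#include <stdio.h>
#include <stdlib.h>

static void listImagesExample(void)
{
    unsigned int imageCount = 0;
    const char **images = objc_copyImageNames(&imageCount);
    for (unsigned int i = 0; i < imageCount; i++) {
        unsigned int classCount = 0;
        const char **names = objc_copyClassNamesForImage(images[i], &classCount);
        printf("%s: %u classes\n", images[i], classCount);
        free(names);
    }
    free(images);
}
#endif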
5719 /***********************************************************************
5720 * saveTemporaryString
5721 * Save a string in a thread-local FIFO buffer.
5722 * This is suitable for temporary strings generated for logging purposes.
5723 **********************************************************************/
5725 saveTemporaryString(char *str)
5727 // Fixed-size FIFO. We free the first string, shift
5728 // the rest, and add the new string to the end.
5729 _objc_pthread_data *data = _objc_fetch_pthread_data(true);
5730 if (data->printableNames[0]) {
5731 free(data->printableNames[0]);
5733 int last = countof(data->printableNames) - 1;
5734 for (int i = 0; i < last; i++) {
5735 data->printableNames[i] = data->printableNames[i+1];
5737 data->printableNames[last] = str;
5741 /***********************************************************************
5742 * objc_class::nameForLogging
5743 * Returns the class's name, suitable for display.
5744 * The returned memory is TEMPORARY. Print it or copy it immediately.
5746 **********************************************************************/
5748 objc_class::nameForLogging()
5750 // Handle the easy case directly.
5751 if (isRealized() || isFuture()) {
5752 if (!isAnySwift()) {
5753 return data()->ro()->getName();
5755 auto rwe = data()->ext();
5756 if (rwe && rwe->demangledName) {
5757 return rwe->demangledName;
5763 if (isStubClass()) {
5764 asprintf(&result, "<stub class %p>", this);
5765 } else if (const char *name = nonlazyMangledName()) {
5766 char *de = copySwiftV1DemangledName(name);
5767 if (de) result = de;
5768 else result = strdup(name);
5770 asprintf(&result, "<lazily named class %p>", this);
5772 saveTemporaryString(result);
5777 /***********************************************************************
5778 * objc_class::demangledName
5779 * If realize=false, the class must already be realized or future.
5780 * Locking: runtimeLock may or may not be held by the caller.
5781 **********************************************************************/
5782 mutex_t DemangleCacheLock;
5783 static objc::DenseSet<const char *> *DemangleCache;
5785 objc_class::demangledName(bool needsLock)
5788 runtimeLock.assertLocked();
5791 // Return previously demangled name if available.
5792 if (isRealized() || isFuture()) {
5793 // Swift metaclasses don't have the is-Swift bit.
5794 // We can't take this shortcut for them.
5795 if (isFuture() || (!isMetaClass() && !isAnySwift())) {
5796 return data()->ro()->getName();
5798 auto rwe = data()->ext();
5799 if (rwe && rwe->demangledName) {
5800 return rwe->demangledName;
5804 // Try demangling the mangled name.
5805 const char *mangled = mangledName();
5806 char *de = copySwiftV1DemangledName(mangled);
5807 class_rw_ext_t *rwe;
5809 if (isRealized() || isFuture()) {
5811 mutex_locker_t lock(runtimeLock);
5812 rwe = data()->extAllocIfNeeded();
5814 rwe = data()->extAllocIfNeeded();
5816 // Class is already realized or future.
5817 // Save demangling result in rw data.
5818 // We may not own runtimeLock so use an atomic operation instead.
5819 if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangled),
5820 (void**)&rwe->demangledName))
5824 return rwe->demangledName;
5827 // Class is not yet realized.
5829 // Name is not mangled. Return it without caching.
5833 // Class is not yet realized and name is mangled.
5834 // Allocate the name but don't save it in the class.
5835 // Save the name in a side cache instead to prevent leaks.
5836 // When the class is actually realized we may allocate a second
5837 // copy of the name, but we don't care.
5838 // (Previously we would try to realize the class now and save the
5839 // name there, but realization is more complicated for Swift classes.)
5841 // Only objc_copyClassNamesForImage() should get here.
5842 // fixme lldb's calls to class_getName() can also get here when
5843 // interrogating the dyld shared cache. (rdar://27258517)
5844 // fixme ASSERT(realize);
5848 mutex_locker_t lock(DemangleCacheLock);
5849 if (!DemangleCache) {
5850 DemangleCache = new objc::DenseSet<const char *>{};
5852 cached = *DemangleCache->insert(de).first;
5854 if (cached != de) free(de);
5859 /***********************************************************************
5860 * class_getName
5862 * Locking: may acquire DemangleCacheLock
5863 **********************************************************************/
5864 const char *class_getName(Class cls)
5866 if (!cls) return "nil";
5867 // fixme lldb calls class_getName() on unrealized classes (rdar://27258517)
5868 // ASSERT(cls->isRealized() || cls->isFuture());
5869 return cls->demangledName(/* needs lock */true);
5872 /***********************************************************************
5873 * objc_debug_class_getNameRaw
5876 **********************************************************************/
5877 const char *objc_debug_class_getNameRaw(Class cls)
5879 if (!cls) return "nil";
5880 return cls->mangledName();
5884 /***********************************************************************
5885 * class_getVersion
5888 **********************************************************************/
5890 class_getVersion(Class cls)
5893 ASSERT(cls->isRealized());
5894 auto rwe = cls->data()->ext();
5896 return rwe->version;
5898 return cls->isMetaClass() ? 7 : 0;
5902 /***********************************************************************
5903 * class_setVersion
5906 **********************************************************************/
5908 class_setVersion(Class cls, int version)
5911 ASSERT(cls->isRealized());
5912 auto rwe = cls->data()->ext();
5914 mutex_locker_t lock(runtimeLock);
5915 rwe = cls->data()->extAllocIfNeeded();
5918 rwe->version = version;
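// Usage sketch (illustrative, hypothetical class name): class_setVersion()
// stores an integer in the class's rw extension and class_getVersion()
// reads it back; classes with no extension report 0 (7 for metaclasses),
// as the code above shows.
#if 0
#include <objc/runtime.h>

static void versionExample(void)
{
    Class cls = objc_getClass("MyModel");
    int before = class_getVersion(cls);    // 0 unless something set it earlier
    class_setVersion(cls, before + 1);
    int after = class_getVersion(cls);     // before + 1
    (void)after;
}
#endif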
5921 /***********************************************************************
5922 * search_method_list_inline
5923 **********************************************************************/
5924 template<class getNameFunc>
5925 ALWAYS_INLINE static method_t *
5926 findMethodInSortedMethodList(SEL key, const method_list_t *list, const getNameFunc &getName)
5930 auto first = list->begin();
5932 decltype(first) probe;
5934 uintptr_t keyValue = (uintptr_t)key;
5937 for (count = list->count; count != 0; count >>= 1) {
5938 probe = base + (count >> 1);
5940 uintptr_t probeValue = (uintptr_t)getName(probe);
5942 if (keyValue == probeValue) {
5943 // `probe` is a match.
5944 // Rewind looking for the *first* occurrence of this value.
5945 // This is required for correct category overrides.
5946 while (probe > first && keyValue == (uintptr_t)getName((probe - 1))) {
5952 if (keyValue > probeValue) {
5961 ALWAYS_INLINE static method_t *
5962 findMethodInSortedMethodList(SEL key, const method_list_t *list)
5964 if (list->isSmallList()) {
5965 if (CONFIG_SHARED_CACHE_RELATIVE_DIRECT_SELECTORS && objc::inSharedCache((uintptr_t)list)) {
5966 return findMethodInSortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSEL(); });
5968 return findMethodInSortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSELRef(); });
5971 return findMethodInSortedMethodList(key, list, [](method_t &m) { return m.big().name; });
5975 template<class getNameFunc>
5976 ALWAYS_INLINE static method_t *
5977 findMethodInUnsortedMethodList(SEL sel, const method_list_t *list, const getNameFunc &getName)
5979 for (auto& meth : *list) {
5980 if (getName(meth) == sel) return &meth;
5985 ALWAYS_INLINE static method_t *
5986 findMethodInUnsortedMethodList(SEL key, const method_list_t *list)
5988 if (list->isSmallList()) {
5989 if (CONFIG_SHARED_CACHE_RELATIVE_DIRECT_SELECTORS && objc::inSharedCache((uintptr_t)list)) {
5990 return findMethodInUnsortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSEL(); });
5992 return findMethodInUnsortedMethodList(key, list, [](method_t &m) { return m.getSmallNameAsSELRef(); });
5995 return findMethodInUnsortedMethodList(key, list, [](method_t &m) { return m.big().name; });
5999 ALWAYS_INLINE static method_t *
6000 search_method_list_inline(const method_list_t *mlist, SEL sel)
6002 int methodListIsFixedUp = mlist->isFixedUp();
6003 int methodListHasExpectedSize = mlist->isExpectedSize();
6005 if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) {
6006 return findMethodInSortedMethodList(sel, mlist);
6008 // Linear search of unsorted method list
6009 if (auto *m = findMethodInUnsortedMethodList(sel, mlist))
6014 // sanity-check negative results
6015 if (mlist->isFixedUp()) {
6016 for (auto& meth : *mlist) {
6017 if (meth.name() == sel) {
6018 _objc_fatal("linear search worked when binary search did not");
6027 NEVER_INLINE static method_t *
6028 search_method_list(const method_list_t *mlist, SEL sel)
6030 return search_method_list_inline(mlist, sel);
6033 /***********************************************************************
6034 * method_lists_contains_any
6035 **********************************************************************/
6036 template<typename T>
6037 static NEVER_INLINE bool
6038 method_lists_contains_any(T *mlists, T *end,
6039 SEL sels[], size_t selcount)
6041 while (mlists < end) {
6042 const method_list_t *mlist = *mlists++;
6043 int methodListIsFixedUp = mlist->isFixedUp();
6044 int methodListHasExpectedSize = mlist->entsize() == sizeof(struct method_t::big);
6046 if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) {
6047 for (size_t i = 0; i < selcount; i++) {
6048 if (findMethodInSortedMethodList(sels[i], mlist)) {
6053 for (size_t i = 0; i < selcount; i++) {
6054 if (findMethodInUnsortedMethodList(sels[i], mlist)) {
6064 /***********************************************************************
6065 * getMethodNoSuper_nolock
6067 * Locking: runtimeLock must be read- or write-locked by the caller
6068 **********************************************************************/
6070 getMethodNoSuper_nolock(Class cls, SEL sel)
6072 runtimeLock.assertLocked();
6074 ASSERT(cls->isRealized());
6078 auto const methods = cls->data()->methods();
6079 for (auto mlists = methods.beginLists(),
6080 end = methods.endLists();
6084 // <rdar://problem/46904873> getMethodNoSuper_nolock is the hottest
6085 // caller of search_method_list, inlining it turns
6086 // getMethodNoSuper_nolock into a frame-less function and eliminates
6087 // any store from this codepath.
6088 method_t *m = search_method_list_inline(*mlists, sel);
6096 /***********************************************************************
6099 * Locking: runtimeLock must be read- or write-locked by the caller
6100 **********************************************************************/
6102 getMethod_nolock(Class cls, SEL sel)
6106 runtimeLock.assertLocked();
6111 ASSERT(cls->isRealized());
6113 while (cls && ((m = getMethodNoSuper_nolock(cls, sel))) == nil) {
6114 cls = cls->getSuperclass();
6121 /***********************************************************************
6124 * Locking: read-locks runtimeLock
6125 **********************************************************************/
6126 static Method _class_getMethod(Class cls, SEL sel)
6128 mutex_locker_t lock(runtimeLock);
6129 return getMethod_nolock(cls, sel);
6133 /***********************************************************************
6134 * class_getInstanceMethod. Return the instance method for the
6135 * specified class and selector.
6136 **********************************************************************/
6137 Method class_getInstanceMethod(Class cls, SEL sel)
6139 if (!cls || !sel) return nil;
6141 // This deliberately avoids +initialize because it historically did so.
6143 // This implementation is a bit weird because it's the only place that
6144 // wants a Method instead of an IMP.
6146 #warning fixme build and search caches
6148 // Search method lists, try method resolver, etc.
6149 lookUpImpOrForward(nil, sel, cls, LOOKUP_RESOLVER);
6151 #warning fixme build and search caches
6153 return _class_getMethod(cls, sel);
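// Illustrative sketch (public API, not part of the runtime): typical
// client use of class_getInstanceMethod() -- inspecting a Method and
// exchanging two existing implementations. The selector "my_description"
// is hypothetical.
#if 0
#include <objc/runtime.h>
#include <stdio.h>

static void inspectAndSwizzle(Class cls)
{
    Method orig = class_getInstanceMethod(cls, sel_registerName("description"));
    Method repl = class_getInstanceMethod(cls, sel_registerName("my_description"));
    if (!orig || !repl) return;

    printf("%s has type encoding %s\n",
           sel_getName(method_getName(orig)), method_getTypeEncoding(orig));

    // Both Methods already exist on cls (or a superclass), so a plain
    // exchange is enough here.
    method_exchangeImplementations(orig, repl);
}
#endif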
6157 /***********************************************************************
6158 * resolveClassMethod
6159 * Call +resolveClassMethod, looking for a method to be added to class cls.
6160 * cls should be a metaclass.
6161 * Does not check if the method already exists.
6162 **********************************************************************/
6163 static void resolveClassMethod(id inst, SEL sel, Class cls)
6165 runtimeLock.assertUnlocked();
6166 ASSERT(cls->isRealized());
6167 ASSERT(cls->isMetaClass());
6169 if (!lookUpImpOrNilTryCache(inst, @selector(resolveClassMethod:), cls)) {
6170 // Resolver not implemented.
6176 mutex_locker_t lock(runtimeLock);
6177 nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst);
6178 // +initialize path should have realized nonmeta already
6179 if (!nonmeta->isRealized()) {
6180 _objc_fatal("nonmeta class %s (%p) unexpectedly not realized",
6181 nonmeta->nameForLogging(), nonmeta);
6184 BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend;
6185 bool resolved = msg(nonmeta, @selector(resolveClassMethod:), sel);
6187 // Cache the result (good or bad) so the resolver doesn't fire next time.
6188 // +resolveClassMethod adds to self->ISA() a.k.a. cls
6189 IMP imp = lookUpImpOrNilTryCache(inst, sel, cls);
6191 if (resolved && PrintResolving) {
6193 _objc_inform("RESOLVE: method %c[%s %s] "
6194 "dynamically resolved to %p",
6195 cls->isMetaClass() ? '+' : '-',
6196 cls->nameForLogging(), sel_getName(sel), imp);
6199 // Method resolver didn't add anything?
6200 _objc_inform("RESOLVE: +[%s resolveClassMethod:%s] returned YES"
6201 ", but no new implementation of %c[%s %s] was found",
6202 cls->nameForLogging(), sel_getName(sel),
6203 cls->isMetaClass() ? '+' : '-',
6204 cls->nameForLogging(), sel_getName(sel));
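// Illustrative sketch (user code, not part of the runtime): how a class
// would typically satisfy the +resolveClassMethod: call made above. The
// class and selector names are hypothetical; note that the IMP must be
// added to the *metaclass*, which is exactly why resolveClassMethod()
// needs the non-meta class to receive the message.
#if 0
#include <objc/runtime.h>

static id HypotheticalSharedInstance(id self, SEL _cmd)
{
    return nil;  // placeholder implementation
}

@implementation MyDynamicClass (ClassMethodResolver)  // hypothetical class
+ (BOOL)resolveClassMethod:(SEL)sel
{
    if (sel == @selector(sharedInstance)) {
        // Class methods live on the metaclass, i.e. object_getClass(self).
        class_addMethod(object_getClass(self), sel,
                        (IMP)HypotheticalSharedInstance, "@@:");
        return YES;
    }
    return [super resolveClassMethod:sel];
}
@end
#endif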
6210 /***********************************************************************
6211 * resolveInstanceMethod
6212 * Call +resolveInstanceMethod, looking for a method to be added to class cls.
6213 * cls may be a metaclass or a non-meta class.
6214 * Does not check if the method already exists.
6215 **********************************************************************/
6216 static void resolveInstanceMethod(id inst, SEL sel, Class cls)
6218 runtimeLock.assertUnlocked();
6219 ASSERT(cls->isRealized());
6220 SEL resolve_sel = @selector(resolveInstanceMethod:);
6222 if (!lookUpImpOrNilTryCache(cls, resolve_sel, cls->ISA(/*authenticated*/true))) {
6223 // Resolver not implemented.
6227 BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend;
6228 bool resolved = msg(cls, resolve_sel, sel);
6230 // Cache the result (good or bad) so the resolver doesn't fire next time.
6231 // +resolveInstanceMethod adds to self a.k.a. cls
6232 IMP imp = lookUpImpOrNilTryCache(inst, sel, cls);
6234 if (resolved && PrintResolving) {
6236 _objc_inform("RESOLVE: method %c[%s %s] "
6237 "dynamically resolved to %p",
6238 cls->isMetaClass() ? '+' : '-',
6239 cls->nameForLogging(), sel_getName(sel), imp);
6242 // Method resolver didn't add anything?
6243 _objc_inform("RESOLVE: +[%s resolveInstanceMethod:%s] returned YES"
6244 ", but no new implementation of %c[%s %s] was found",
6245 cls->nameForLogging(), sel_getName(sel),
6246 cls->isMetaClass() ? '+' : '-',
6247 cls->nameForLogging(), sel_getName(sel));
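// Illustrative sketch (user code, not part of the runtime): the
// +resolveInstanceMethod: counterpart. Names are hypothetical; returning
// YES makes the caller above retry the lookup and cache the new IMP.
#if 0
#include <objc/runtime.h>

static void HypotheticalDynamicSetter(id self, SEL _cmd, id value)
{
    // placeholder implementation
}

@implementation MyDynamicClass (InstanceMethodResolver)  // hypothetical class
+ (BOOL)resolveInstanceMethod:(SEL)sel
{
    if (sel == @selector(setThing:)) {
        // "v@:@" = void return, self, _cmd, one object argument.
        class_addMethod(self, sel, (IMP)HypotheticalDynamicSetter, "v@:@");
        return YES;
    }
    return [super resolveInstanceMethod:sel];
}
@end
#endif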
6253 /***********************************************************************
6254 * resolveMethod_locked
6255 * Call +resolveClassMethod or +resolveInstanceMethod.
6257 * Called with the runtimeLock held to avoid pressure in the caller
6258 * Tail calls into lookUpImpOrForward, also to avoid pressure in the caller.
6259 **********************************************************************/
6260 static NEVER_INLINE IMP
6261 resolveMethod_locked(id inst, SEL sel, Class cls, int behavior)
6263 runtimeLock.assertLocked();
6264 ASSERT(cls->isRealized());
6266 runtimeLock.unlock();
6268 if (! cls->isMetaClass()) {
6269 // try [cls resolveInstanceMethod:sel]
6270 resolveInstanceMethod(inst, sel, cls);
6273 // try [nonMetaClass resolveClassMethod:sel]
6274 // and [cls resolveInstanceMethod:sel]
6275 resolveClassMethod(inst, sel, cls);
6276 if (!lookUpImpOrNilTryCache(inst, sel, cls)) {
6277 resolveInstanceMethod(inst, sel, cls);
6281 // Chances are that calling the resolver has populated the cache,
6282 // so attempt to use it.
6283 return lookUpImpOrForwardTryCache(inst, sel, cls, behavior);
6287 /***********************************************************************
6288 * log_and_fill_cache
6289 * Log this method call. If the logger permits it, fill the method cache.
6290 * cls is the class whose cache should be filled.
6291 * implementer is the class that owns the implementation in question.
6292 **********************************************************************/
6294 log_and_fill_cache(Class cls, IMP imp, SEL sel, id receiver, Class implementer)
6296 #if SUPPORT_MESSAGE_LOGGING
6297 if (slowpath(objcMsgLogEnabled && implementer)) {
6298 bool cacheIt = logMessageSend(implementer->isMetaClass(),
6299 cls->nameForLogging(),
6300 implementer->nameForLogging(),
6302 if (!cacheIt) return;
6305 cls->cache.insert(sel, imp, receiver);
6309 /***********************************************************************
6310 * realizeAndInitializeIfNeeded_locked
6311 * Realize the given class if not already realized, and initialize it if
6312 * not already initialized.
6313 * inst is an instance of cls or a subclass, or nil if none is known.
6314 * cls is the class to initialize and realize.
6315 * initialize is true to initialize the class, false to skip initialization.
6316 **********************************************************************/
6318 realizeAndInitializeIfNeeded_locked(id inst, Class cls, bool initialize)
6320 runtimeLock.assertLocked();
6321 if (slowpath(!cls->isRealized())) {
6322 cls = realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
6323 // runtimeLock may have been dropped but is now locked again
6326 if (slowpath(initialize && !cls->isInitialized())) {
6327 cls = initializeAndLeaveLocked(cls, inst, runtimeLock);
6328 // runtimeLock may have been dropped but is now locked again
6330 // If sel == initialize, class_initialize will send +initialize and
6331 // then the messenger will send +initialize again after this
6332 // procedure finishes. Of course, if this is not being called
6333 // from the messenger then it won't happen. 2778172
6338 /***********************************************************************
6339 * lookUpImpOrForward / lookUpImpOrForwardTryCache / lookUpImpOrNilTryCache
6340 * The standard IMP lookup.
6342 * The TryCache variant attempts a fast-path lookup in the IMP Cache.
6343 * Most callers should use lookUpImpOrForwardTryCache with LOOKUP_INITIALIZE
6345 * Without LOOKUP_INITIALIZE: tries to avoid +initialize (but sometimes fails)
6346 * With LOOKUP_NIL: returns nil on negative cache hits
6348 * inst is an instance of cls or a subclass thereof, or nil if none is known.
6349 * If cls is an un-initialized metaclass then a non-nil inst is faster.
6350 * May return _objc_msgForward_impcache. IMPs destined for external use
6351 * must be converted to _objc_msgForward or _objc_msgForward_stret.
6352 * If you don't want forwarding at all, use LOOKUP_NIL.
6353 **********************************************************************/
6355 static IMP _lookUpImpTryCache(id inst, SEL sel, Class cls, int behavior)
6357 runtimeLock.assertUnlocked();
6359 if (slowpath(!cls->isInitialized())) {
6360 // see comment in lookUpImpOrForward
6361 return lookUpImpOrForward(inst, sel, cls, behavior);
6364 IMP imp = cache_getImp(cls, sel);
6365 if (imp != NULL) goto done;
6366 #if CONFIG_USE_PREOPT_CACHES
6367 if (fastpath(cls->cache.isConstantOptimizedCache(/* strict */true))) {
6368 imp = cache_getImp(cls->cache.preoptFallbackClass(), sel);
6371 if (slowpath(imp == NULL)) {
6372 return lookUpImpOrForward(inst, sel, cls, behavior);
6376 if ((behavior & LOOKUP_NIL) && imp == (IMP)_objc_msgForward_impcache) {
6382 IMP lookUpImpOrForwardTryCache(id inst, SEL sel, Class cls, int behavior)
6384 return _lookUpImpTryCache(inst, sel, cls, behavior);
6387 IMP lookUpImpOrNilTryCache(id inst, SEL sel, Class cls, int behavior)
6389 return _lookUpImpTryCache(inst, sel, cls, behavior | LOOKUP_NIL);
6393 IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior)
6395 const IMP forward_imp = (IMP)_objc_msgForward_impcache;
6399 runtimeLock.assertUnlocked();
6401 if (slowpath(!cls->isInitialized())) {
6402 // The first message sent to a class is often +new or +alloc, or +self
6403 // which goes through objc_opt_* or various optimized entry points.
6405 // However, the class isn't realized/initialized yet at this point,
6406 // and the optimized entry points fall down through objc_msgSend,
6407 // which ends up here.
6409 // We really want to avoid caching these, as it can cause IMP caches
6410 // to be made with a single entry forever.
6412 // Note that this check is racy as several threads might try to
6413 // message a given class for the first time at the same time,
6414 // in which case we might cache anyway.
6415 behavior |= LOOKUP_NOCACHE;
6418 // runtimeLock is held during isRealized and isInitialized checking
6419 // to prevent races against concurrent realization.
6421 // runtimeLock is held during method search to make
6422 // method-lookup + cache-fill atomic with respect to method addition.
6423 // Otherwise, a category could be added but ignored indefinitely because
6424 // the cache was re-filled with the old value after the cache flush on
6425 // behalf of the category.
6429 // We don't want people to be able to craft a binary blob that looks like
6430 // a class but really isn't one and use it to mount a CFI attack.
6432 // To make these harder we want to make sure this is a class that was
6433 // either built into the binary or legitimately registered through
6434 // objc_duplicateClass, objc_initializeClassPair or objc_allocateClassPair.
6435 checkIsKnownClass(cls);
6437 cls = realizeAndInitializeIfNeeded_locked(inst, cls, behavior & LOOKUP_INITIALIZE);
6438 // runtimeLock may have been dropped but is now locked again
6439 runtimeLock.assertLocked();
6442 // The code used to look up the class's cache again right after
6443 // taking the lock, but evidence shows that in the vast majority of
6444 // cases that second probe is a miss, so it only wasted time.
6446 // The only codepath calling into this without having performed some
6447 // kind of cache lookup is class_getInstanceMethod().
6449 for (unsigned attempts = unreasonableClassCount();;) {
6450 if (curClass->cache.isConstantOptimizedCache(/* strict */true)) {
6451 #if CONFIG_USE_PREOPT_CACHES
6452 imp = cache_getImp(curClass, sel);
6453 if (imp) goto done_unlock;
6454 curClass = curClass->cache.preoptFallbackClass();
6457 // curClass method list.
6458 Method meth = getMethodNoSuper_nolock(curClass, sel);
6460 imp = meth->imp(false);
6464 if (slowpath((curClass = curClass->getSuperclass()) == nil)) {
6465 // No implementation found, and method resolver didn't help.
6472 // Halt if there is a cycle in the superclass chain.
6473 if (slowpath(--attempts == 0)) {
6474 _objc_fatal("Memory corruption in class list.");
6477 // Superclass cache.
6478 imp = cache_getImp(curClass, sel);
6479 if (slowpath(imp == forward_imp)) {
6480 // Found a forward:: entry in a superclass.
6481 // Stop searching, but don't cache yet; call method
6482 // resolver for this class first.
6485 if (fastpath(imp)) {
6486 // Found the method in a superclass. Cache it in this class.
6491 // No implementation found. Try method resolver once.
6493 if (slowpath(behavior & LOOKUP_RESOLVER)) {
6494 behavior ^= LOOKUP_RESOLVER;
6495 return resolveMethod_locked(inst, sel, cls, behavior);
6499 if (fastpath((behavior & LOOKUP_NOCACHE) == 0)) {
6500 #if CONFIG_USE_PREOPT_CACHES
6501 while (cls->cache.isConstantOptimizedCache(/* strict */true)) {
6502 cls = cls->cache.preoptFallbackClass();
6505 log_and_fill_cache(cls, imp, sel, inst, curClass);
6508 runtimeLock.unlock();
6509 if (slowpath((behavior & LOOKUP_NIL) && imp == forward_imp)) {
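// Illustrative sketch (public API, not part of the runtime): the
// forwarding behavior described above is visible to clients through
// class_getMethodImplementation(), which on a miss typically returns a
// forwarding trampoline rather than NULL. Gate on
// class_respondsToSelector() when a real implementation is required.
#if 0
#include <objc/runtime.h>

static IMP implementationIfAny(Class cls, SEL sel)
{
    if (!class_respondsToSelector(cls, sel)) return NULL;
    return class_getMethodImplementation(cls, sel);
}
#endif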
6515 /***********************************************************************
6516 * lookupMethodInClassAndLoadCache.
6517 * Like lookUpImpOrForward, but does not search superclasses.
6518 * Caches and returns objc_msgForward if the method is not found in the class.
6519 **********************************************************************/
6520 IMP lookupMethodInClassAndLoadCache(Class cls, SEL sel)
6524 // fixme this is incomplete - no resolver, +initialize -
6525 // but it's only used for .cxx_construct/destruct so we don't care
6526 ASSERT(sel == SEL_cxx_construct || sel == SEL_cxx_destruct);
6528 // Search cache first.
6530 // If the cache used for the lookup is preoptimized,
6531 // we ask for `_objc_msgForward_impcache` to be returned on cache misses,
6532 // so that there's no TOCTOU race between using `isConstantOptimizedCache`
6533 // and calling cache_getImp() when not under the runtime lock.
6535 // For dynamic caches, a miss will return `nil`
6536 imp = cache_getImp(cls, sel, _objc_msgForward_impcache);
6538 if (slowpath(imp == nil)) {
6539 // Cache miss. Search method list.
6541 mutex_locker_t lock(runtimeLock);
6543 if (auto meth = getMethodNoSuper_nolock(cls, sel)) {
6544 // Hit in method list. Cache it.
6545 imp = meth->imp(false);
6547 imp = _objc_msgForward_impcache;
6550 // Note: because we did not hold the runtime lock above,
6551 // isConstantOptimizedCache might have flipped, so we need to double-check.
6552 if (!cls->cache.isConstantOptimizedCache(true /* strict */)) {
6553 cls->cache.insert(sel, imp, nil);
6561 /***********************************************************************
6564 * Locking: read-locks runtimeLock
6565 **********************************************************************/
6566 objc_property_t class_getProperty(Class cls, const char *name)
6568 if (!cls || !name) return nil;
6570 mutex_locker_t lock(runtimeLock);
6572 checkIsKnownClass(cls);
6574 ASSERT(cls->isRealized());
6576 for ( ; cls; cls = cls->getSuperclass()) {
6577 for (auto& prop : cls->data()->properties()) {
6578 if (0 == strcmp(name, prop.name)) {
6579 return (objc_property_t)&prop;
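// Illustrative sketch (public API, not part of the runtime): reading a
// property found by class_getProperty(). The property name "name" is
// hypothetical.
#if 0
#include <objc/runtime.h>
#include <stdio.h>

static void dumpProperty(Class cls)
{
    objc_property_t prop = class_getProperty(cls, "name");
    if (prop) {
        // e.g. "T@\"NSString\",C,N,V_name": type, copy, nonatomic, ivar.
        printf("%s -> %s\n", property_getName(prop), property_getAttributes(prop));
    }
}
#endif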
6588 /***********************************************************************
6590 **********************************************************************/
6592 Class gdb_class_getClass(Class cls)
6594 const char *className = cls->mangledName();
6595 if(!className || !strlen(className)) return Nil;
6596 Class rCls = look_up_class(className, NO, NO);
6600 Class gdb_object_getClass(id obj)
6602 if (!obj) return nil;
6603 return gdb_class_getClass(obj->getIsa());
6607 /***********************************************************************
6608 * Locking: write-locks runtimeLock
6609 **********************************************************************/
6611 objc_class::setInitialized()
6616 ASSERT(!isMetaClass());
6619 metacls = cls->ISA();
6621 mutex_locker_t lock(runtimeLock);
6624 // - NSObject AWZ class methods are default.
6625 // - NSObject RR class and instance methods are default.
6626 // - NSObject Core class and instance methods are default.
6627 // adjustCustomFlagsForMethodChange() also knows these special cases.
6628 // attachMethodLists() also knows these special cases.
6630 objc::AWZScanner::scanInitializedClass(cls, metacls);
6631 objc::RRScanner::scanInitializedClass(cls, metacls);
6632 objc::CoreScanner::scanInitializedClass(cls, metacls);
6634 #if CONFIG_USE_PREOPT_CACHES
6635 cls->cache.maybeConvertToPreoptimized();
6636 metacls->cache.maybeConvertToPreoptimized();
6639 if (PrintInitializing) {
6640 _objc_inform("INITIALIZE: thread %p: setInitialized(%s)",
6641 objc_thread_self(), cls->nameForLogging());
6643 // Update the +initialize flags.
6645 metacls->changeInfo(RW_INITIALIZED, RW_INITIALIZING);
6650 objc_class::printInstancesRequireRawIsa(bool inherited)
6652 ASSERT(PrintRawIsa);
6653 ASSERT(instancesRequireRawIsa());
6654 _objc_inform("RAW ISA: %s%s%s", nameForLogging(),
6655 isMetaClass() ? " (meta)" : "",
6656 inherited ? " (inherited)" : "");
6659 /***********************************************************************
6660 * Mark this class and all of its subclasses as requiring raw isa pointers
6661 **********************************************************************/
6662 void objc_class::setInstancesRequireRawIsaRecursively(bool inherited)
6664 Class cls = (Class)this;
6665 runtimeLock.assertLocked();
6667 if (instancesRequireRawIsa()) return;
6669 foreach_realized_class_and_subclass(cls, [=](Class c){
6670 if (c->instancesRequireRawIsa()) {
6674 c->setInstancesRequireRawIsa();
6676 if (PrintRawIsa) c->printInstancesRequireRawIsa(inherited || c != cls);
6681 #if CONFIG_USE_PREOPT_CACHES
6682 void objc_class::setDisallowPreoptCachesRecursively(const char *why)
6684 Class cls = (Class)this;
6685 runtimeLock.assertLocked();
6687 if (!allowsPreoptCaches()) return;
6689 foreach_realized_class_and_subclass(cls, [=](Class c){
6690 if (!c->allowsPreoptCaches()) {
6694 if (c->cache.isConstantOptimizedCache(/* strict */true)) {
6695 c->cache.eraseNolock(why);
6698 _objc_inform("CACHES: %sclass %s: disallow preopt cache (from %s)",
6699 isMetaClass() ? "meta" : "",
6700 nameForLogging(), why);
6702 c->setDisallowPreoptCaches();
6708 void objc_class::setDisallowPreoptInlinedSelsRecursively(const char *why)
6710 Class cls = (Class)this;
6711 runtimeLock.assertLocked();
6713 if (!allowsPreoptInlinedSels()) return;
6715 foreach_realized_class_and_subclass(cls, [=](Class c){
6716 if (!c->allowsPreoptInlinedSels()) {
6721 _objc_inform("CACHES: %sclass %s: disallow sel-inlined preopt cache (from %s)",
6722 isMetaClass() ? "meta" : "",
6723 nameForLogging(), why);
6726 c->setDisallowPreoptInlinedSels();
6727 if (c->cache.isConstantOptimizedCacheWithInlinedSels()) {
6728 c->cache.eraseNolock(why);
6735 /***********************************************************************
6736 * Choose a class index.
6737 * Set instancesRequireRawIsa if no more class indexes are available.
6738 **********************************************************************/
6739 void objc_class::chooseClassArrayIndex()
6741 #if SUPPORT_INDEXED_ISA
6742 Class cls = (Class)this;
6743 runtimeLock.assertLocked();
6745 if (objc_indexed_classes_count >= ISA_INDEX_COUNT) {
6746 // No more indexes available.
6747 ASSERT(cls->classArrayIndex() == 0);
6748 cls->setInstancesRequireRawIsaRecursively(false/*not inherited*/);
6752 unsigned index = objc_indexed_classes_count++;
6753 if (index == 0) index = objc_indexed_classes_count++; // index 0 is unused
6754 classForIndex(index) = cls;
6755 cls->setClassArrayIndex(index);
6759 static const char *empty_lazyClassNamer(Class cls __unused) {
6763 static ChainedHookFunction<objc_hook_lazyClassNamer> LazyClassNamerHook{empty_lazyClassNamer};
6765 void objc_setHook_lazyClassNamer(_Nonnull objc_hook_lazyClassNamer newValue,
6766 _Nonnull objc_hook_lazyClassNamer * _Nonnull oldOutValue) {
6767 LazyClassNamerHook.set(newValue, oldOutValue);
6770 const char * objc_class::installMangledNameForLazilyNamedClass() {
6771 auto lazyClassNamer = LazyClassNamerHook.get();
6772 if (!*lazyClassNamer) {
6773 _objc_fatal("Lazily named class %p with no lazy name handler registered", this);
6776 // If this is called on a metaclass, extract the original class
6777 // and make it do the installation instead. It will install
6778 // the metaclass's name too.
6779 if (isMetaClass()) {
6780 Class nonMeta = bits.safe_ro()->getNonMetaclass();
6781 return nonMeta->installMangledNameForLazilyNamedClass();
6784 Class cls = (Class)this;
6785 Class metaclass = ISA();
6787 const char *name = lazyClassNamer((Class)this);
6789 _objc_fatal("Lazily named class %p wasn't named by lazy name handler", this);
6792 // Emplace the name into the class_ro_t. If we lose the race,
6793 // then we'll free our name and use whatever got placed there
6794 // instead of our name.
6795 const char *previously = NULL;
6796 class_ro_t *ro = (class_ro_t *)cls->bits.safe_ro();
6797 bool wonRace = ro->name.compare_exchange_strong(previously, name, std::memory_order_release, std::memory_order_acquire);
6803 // Emplace whatever name won the race in the metaclass too.
6804 class_ro_t *metaRO = (class_ro_t *)metaclass->bits.safe_ro();
6806 // Write our pointer if the current value is NULL. There's no
6807 // need to loop or check success, since the only way this can
6808 // fail is if another thread succeeded in writing the exact same pointer.
6810 const char *expected = NULL;
6811 metaRO->name.compare_exchange_strong(expected, name, std::memory_order_release, std::memory_order_acquire);
6816 /***********************************************************************
6817 * Update custom RR and AWZ when a method changes its IMP
6818 **********************************************************************/
6820 adjustCustomFlagsForMethodChange(Class cls, method_t *meth)
6822 objc::AWZScanner::scanChangedMethod(cls, meth);
6823 objc::RRScanner::scanChangedMethod(cls, meth);
6824 objc::CoreScanner::scanChangedMethod(cls, meth);
6828 /***********************************************************************
6829 * class_getIvarLayout
6830 * Called by the garbage collector.
6831 * The class must be nil or already realized.
6833 **********************************************************************/
6835 class_getIvarLayout(Class cls)
6837 if (cls) return cls->data()->ro()->getIvarLayout();
6842 /***********************************************************************
6843 * class_getWeakIvarLayout
6844 * Called by the garbage collector.
6845 * The class must be nil or already realized.
6847 **********************************************************************/
6849 class_getWeakIvarLayout(Class cls)
6851 if (cls) return cls->data()->ro()->weakIvarLayout;
6856 /***********************************************************************
6857 * class_setIvarLayout
6858 * Changes the class's ivar layout.
6859 * nil layout means no unscanned ivars
6860 * The class must be under construction.
6861 * fixme: sanity-check layout vs instance size?
6862 * fixme: sanity-check layout vs superclass?
6863 * Locking: acquires runtimeLock
6864 **********************************************************************/
6866 class_setIvarLayout(Class cls, const uint8_t *layout)
6870 ASSERT(!cls->isMetaClass());
6872 mutex_locker_t lock(runtimeLock);
6874 checkIsKnownClass(cls);
6876 // Can only change layout of in-construction classes.
6877 // note: if modifications to post-construction classes were
6878 // allowed, there would be a race below (us vs. concurrent object_setIvar)
6879 if (!(cls->data()->flags & RW_CONSTRUCTING)) {
6880 _objc_inform("*** Can't set ivar layout for already-registered "
6881 "class '%s'", cls->nameForLogging());
6885 class_ro_t *ro_w = make_ro_writeable(cls->data());
6887 try_free(ro_w->getIvarLayout());
6888 ro_w->ivarLayout = ustrdupMaybeNil(layout);
6892 /***********************************************************************
6893 * class_setWeakIvarLayout
6894 * Changes the class's weak ivar layout.
6895 * nil layout means no weak ivars
6896 * The class must be under construction.
6897 * fixme: sanity-check layout vs instance size?
6898 * fixme: sanity-check layout vs superclass?
6899 * Locking: acquires runtimeLock
6900 **********************************************************************/
6902 class_setWeakIvarLayout(Class cls, const uint8_t *layout)
6906 mutex_locker_t lock(runtimeLock);
6908 checkIsKnownClass(cls);
6910 // Can only change layout of in-construction classes.
6911 // note: if modifications to post-construction classes were
6912 // allowed, there would be a race below (us vs. concurrent object_setIvar)
6913 if (!(cls->data()->flags & RW_CONSTRUCTING)) {
6914 _objc_inform("*** Can't set weak ivar layout for already-registered "
6915 "class '%s'", cls->nameForLogging());
6919 class_ro_t *ro_w = make_ro_writeable(cls->data());
6921 try_free(ro_w->weakIvarLayout);
6922 ro_w->weakIvarLayout = ustrdupMaybeNil(layout);
6926 /***********************************************************************
6928 * Look up an ivar by name.
6929 * Locking: runtimeLock must be read- or write-locked by the caller.
6930 **********************************************************************/
6931 static ivar_t *getIvar(Class cls, const char *name)
6933 runtimeLock.assertLocked();
6935 const ivar_list_t *ivars;
6936 ASSERT(cls->isRealized());
6937 if ((ivars = cls->data()->ro()->ivars)) {
6938 for (auto& ivar : *ivars) {
6939 if (!ivar.offset) continue; // anonymous bitfield
6941 // ivar.name may be nil for anonymous bitfields etc.
6942 if (ivar.name && 0 == strcmp(name, ivar.name)) {
6952 /***********************************************************************
6953 * _class_getClassForIvar
6954 * Given a class and an ivar that is in it or one of its superclasses,
6955 * find the actual class that defined the ivar.
6956 **********************************************************************/
6957 Class _class_getClassForIvar(Class cls, Ivar ivar)
6959 mutex_locker_t lock(runtimeLock);
6961 for ( ; cls; cls = cls->getSuperclass()) {
6962 if (auto ivars = cls->data()->ro()->ivars) {
6963 if (ivars->containsIvar(ivar)) {
6973 /***********************************************************************
6974 * _class_getVariable
6976 * Locking: read-locks runtimeLock
6977 **********************************************************************/
6979 _class_getVariable(Class cls, const char *name)
6981 mutex_locker_t lock(runtimeLock);
6983 for ( ; cls; cls = cls->getSuperclass()) {
6984 ivar_t *ivar = getIvar(cls, name);
6994 /***********************************************************************
6995 * class_conformsToProtocol
6997 * Locking: read-locks runtimeLock
6998 **********************************************************************/
6999 BOOL class_conformsToProtocol(Class cls, Protocol *proto_gen)
7001 protocol_t *proto = newprotocol(proto_gen);
7003 if (!cls) return NO;
7004 if (!proto_gen) return NO;
7006 mutex_locker_t lock(runtimeLock);
7008 checkIsKnownClass(cls);
7010 ASSERT(cls->isRealized());
7012 for (const auto& proto_ref : cls->data()->protocols()) {
7013 protocol_t *p = remapProtocol(proto_ref);
7014 if (p == proto || protocol_conformsToProtocol_nolock(p, proto)) {
7023 addMethods_finish(Class cls, method_list_t *newlist)
7025 auto rwe = cls->data()->extAllocIfNeeded();
7027 if (newlist->count > 1) {
7028 method_t::SortBySELAddress sorter;
7029 std::stable_sort(&newlist->begin()->big(), &newlist->end()->big(), sorter);
7032 prepareMethodLists(cls, &newlist, 1, NO, NO, __func__);
7033 rwe->methods.attachLists(&newlist, 1);
7035 // If the class being modified has a constant cache,
7036 // then all children classes are flattened constant caches
7037 // and need to be flushed as well.
7038 flushCaches(cls, __func__, [](Class c){
7039 // constant caches have been dealt with in prepareMethodLists
7040 // if the class is still constant here, it's fine to keep it
7041 return !c->cache.isConstantOptimizedCache();
7046 /**********************************************************************
7049 * Locking: runtimeLock must be held by the caller
7050 **********************************************************************/
7052 addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace)
7056 runtimeLock.assertLocked();
7058 checkIsKnownClass(cls);
7061 ASSERT(cls->isRealized());
7064 if ((m = getMethodNoSuper_nolock(cls, name))) {
7067 result = m->imp(false);
7069 result = _method_setImplementation(cls, m, imp);
7073 method_list_t *newlist;
7074 newlist = (method_list_t *)calloc(method_list_t::byteSize(method_t::bigSize, 1), 1);
7075 newlist->entsizeAndFlags =
7076 (uint32_t)sizeof(struct method_t::big) | fixed_up_method_list;
7078 auto &first = newlist->begin()->big();
7080 first.types = strdupIfMutable(types);
7083 addMethods_finish(cls, newlist);
7090 /**********************************************************************
7092 * Add the given methods to a class in bulk.
7093 * Returns the selectors which could not be added, when replace == NO and a
7094 * method already exists. The returned selectors are NULL terminated and must be
7095 * freed by the caller. They are NULL if no failures occurred.
7096 * Locking: runtimeLock must be held by the caller
7097 **********************************************************************/
7099 addMethods(Class cls, const SEL *names, const IMP *imps, const char **types,
7100 uint32_t count, bool replace, uint32_t *outFailedCount)
7102 runtimeLock.assertLocked();
7107 ASSERT(cls->isRealized());
7109 method_list_t *newlist;
7110 size_t newlistSize = method_list_t::byteSize(sizeof(struct method_t::big), count);
7111 newlist = (method_list_t *)calloc(newlistSize, 1);
7112 newlist->entsizeAndFlags =
7113 (uint32_t)sizeof(struct method_t::big) | fixed_up_method_list;
7116 SEL *failedNames = nil;
7117 uint32_t failedCount = 0;
7119 for (uint32_t i = 0; i < count; i++) {
7121 if ((m = getMethodNoSuper_nolock(cls, names[i]))) {
7125 if (failedNames == nil) {
7126 // allocate an extra entry for a trailing NULL in case
7127 // every method fails
7128 failedNames = (SEL *)calloc(sizeof(*failedNames),
7131 failedNames[failedCount] = m->name();
7134 _method_setImplementation(cls, m, imps[i]);
7137 auto &newmethod = newlist->end()->big();
7138 newmethod.name = names[i];
7139 newmethod.types = strdupIfMutable(types[i]);
7140 newmethod.imp = imps[i];
7145 if (newlist->count > 0) {
7146 // fixme resize newlist because it may have been over-allocated above.
7147 // Note that realloc() alone doesn't work due to ptrauth.
7148 addMethods_finish(cls, newlist);
7150 // Attaching the method list to the class consumes it. If we don't
7151 // do that, we have to free the memory ourselves.
7155 if (outFailedCount) *outFailedCount = failedCount;
7162 class_addMethod(Class cls, SEL name, IMP imp, const char *types)
7164 if (!cls) return NO;
7166 mutex_locker_t lock(runtimeLock);
7167 return ! addMethod(cls, name, imp, types ?: "", NO);
7172 class_replaceMethod(Class cls, SEL name, IMP imp, const char *types)
7174 if (!cls) return nil;
7176 mutex_locker_t lock(runtimeLock);
7177 return addMethod(cls, name, imp, types ?: "", YES);
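// Illustrative sketch (public API, not part of the runtime): the usual
// add-or-replace idiom built on the two entry points above. The selector
// and function are hypothetical.
#if 0
#include <objc/runtime.h>

static void HypotheticalPing(id self, SEL _cmd)
{
    // placeholder implementation
}

static void installPing(Class cls)
{
    SEL sel = sel_registerName("ping");

    // class_addMethod() returns NO if cls itself already implements the
    // selector; a superclass implementation does not block the add.
    if (!class_addMethod(cls, sel, (IMP)HypotheticalPing, "v@:")) {
        // Force the override; returns the previous IMP, if any.
        class_replaceMethod(cls, sel, (IMP)HypotheticalPing, "v@:");
    }
}
#endif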
7182 class_addMethodsBulk(Class cls, const SEL *names, const IMP *imps,
7183 const char **types, uint32_t count,
7184 uint32_t *outFailedCount)
7187 if (outFailedCount) *outFailedCount = count;
7188 return (SEL *)memdup(names, count * sizeof(*names));
7191 mutex_locker_t lock(runtimeLock);
7192 return addMethods(cls, names, imps, types, count, NO, outFailedCount);
7196 class_replaceMethodsBulk(Class cls, const SEL *names, const IMP *imps,
7197 const char **types, uint32_t count)
7201 mutex_locker_t lock(runtimeLock);
7202 addMethods(cls, names, imps, types, count, YES, nil);
7206 /***********************************************************************
7208 * Adds an ivar to a class.
7209 * Locking: acquires runtimeLock
7210 **********************************************************************/
7212 class_addIvar(Class cls, const char *name, size_t size,
7213 uint8_t alignment, const char *type)
7215 if (!cls) return NO;
7217 if (!type) type = "";
7218 if (name && 0 == strcmp(name, "")) name = nil;
7220 mutex_locker_t lock(runtimeLock);
7222 checkIsKnownClass(cls);
7223 ASSERT(cls->isRealized());
7225 // No class variables
7226 if (cls->isMetaClass()) {
7230 // Can only add ivars to in-construction classes.
7231 if (!(cls->data()->flags & RW_CONSTRUCTING)) {
7235 // Check for existing ivar with this name, unless it's anonymous.
7236 // Check for too-big ivar.
7237 // fixme check for superclass ivar too?
7238 if ((name && getIvar(cls, name)) || size > UINT32_MAX) {
7242 class_ro_t *ro_w = make_ro_writeable(cls->data());
7244 // fixme allocate less memory here
7246 ivar_list_t *oldlist, *newlist;
7247 if ((oldlist = (ivar_list_t *)cls->data()->ro()->ivars)) {
7248 size_t oldsize = oldlist->byteSize();
7249 newlist = (ivar_list_t *)calloc(oldsize + oldlist->entsize(), 1);
7250 memcpy(newlist, oldlist, oldsize);
7253 newlist = (ivar_list_t *)calloc(ivar_list_t::byteSize(sizeof(ivar_t), 1), 1);
7254 newlist->entsizeAndFlags = (uint32_t)sizeof(ivar_t);
7257 uint32_t offset = cls->unalignedInstanceSize();
7258 uint32_t alignMask = (1<<alignment)-1;
7259 offset = (offset + alignMask) & ~alignMask;
7261 ivar_t& ivar = newlist->get(newlist->count++);
7263 // Deliberately over-allocate the ivar offset variable.
7264 // Use calloc() to clear all 64 bits. See the note in struct ivar_t.
7265 ivar.offset = (int32_t *)(int64_t *)calloc(sizeof(int64_t), 1);
7267 ivar.offset = (int32_t *)malloc(sizeof(int32_t));
7269 *ivar.offset = offset;
7270 ivar.name = name ? strdupIfMutable(name) : nil;
7271 ivar.type = strdupIfMutable(type);
7272 ivar.alignment_raw = alignment;
7273 ivar.size = (uint32_t)size;
7275 ro_w->ivars = newlist;
7276 cls->setInstanceSize((uint32_t)(offset + size));
7278 // Ivar layout updated in registerClass.
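// Illustrative sketch (public API, not part of the runtime): ivars can
// only be added while the class is still under construction, i.e.
// between objc_allocateClassPair() and objc_registerClassPair(), which
// is exactly the RW_CONSTRUCTING check above. Class and ivar names are
// hypothetical; assumes NSObject is available as the superclass.
#if 0
#include <objc/runtime.h>
#include <math.h>

static Class makeClassWithIvar(void)
{
    Class cls = objc_allocateClassPair(objc_getClass("NSObject"), "MyIvarDemo", 0);
    if (!cls) return Nil;

    // alignment is expressed as log2 of the byte alignment.
    class_addIvar(cls, "_payload", sizeof(id), (uint8_t)log2(sizeof(id)), "@");

    objc_registerClassPair(cls);
    return cls;   // access later via class_getInstanceVariable()/ivar_getOffset()
}
#endif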
7284 /***********************************************************************
7286 * Adds a protocol to a class.
7287 * Locking: acquires runtimeLock
7288 **********************************************************************/
7289 BOOL class_addProtocol(Class cls, Protocol *protocol_gen)
7291 protocol_t *protocol = newprotocol(protocol_gen);
7293 if (!cls) return NO;
7294 if (class_conformsToProtocol(cls, protocol_gen)) return NO;
7296 mutex_locker_t lock(runtimeLock);
7297 auto rwe = cls->data()->extAllocIfNeeded();
7299 ASSERT(cls->isRealized());
7302 protocol_list_t *protolist = (protocol_list_t *)
7303 malloc(sizeof(protocol_list_t) + sizeof(protocol_t *));
7304 protolist->count = 1;
7305 protolist->list[0] = (protocol_ref_t)protocol;
7307 rwe->protocols.attachLists(&protolist, 1);
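// Illustrative sketch (public API, not part of the runtime): adopting a
// protocol at runtime. The protocol name is only an example;
// objc_getProtocol() finds protocols that are registered with the
// runtime, i.e. referenced somewhere at compile time.
#if 0
#include <objc/runtime.h>

static void adoptCoding(Class cls)
{
    Protocol *p = objc_getProtocol("NSCoding");
    if (p && !class_conformsToProtocol(cls, p)) {
        class_addProtocol(cls, p);
    }
}
#endif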
7315 /***********************************************************************
7317 * Adds a property to a class.
7318 * Locking: acquires runtimeLock
7319 **********************************************************************/
7321 _class_addProperty(Class cls, const char *name,
7322 const objc_property_attribute_t *attrs, unsigned int count,
7325 if (!cls) return NO;
7326 if (!name) return NO;
7328 property_t *prop = class_getProperty(cls, name);
7329 if (prop && !replace) {
7330 // already exists, refuse to replace
7335 mutex_locker_t lock(runtimeLock);
7336 try_free(prop->attributes);
7337 prop->attributes = copyPropertyAttributeString(attrs, count);
7341 mutex_locker_t lock(runtimeLock);
7342 auto rwe = cls->data()->extAllocIfNeeded();
7344 ASSERT(cls->isRealized());
7346 property_list_t *proplist = (property_list_t *)
7347 malloc(property_list_t::byteSize(sizeof(property_t), 1));
7348 proplist->count = 1;
7349 proplist->entsizeAndFlags = sizeof(property_t);
7350 proplist->begin()->name = strdupIfMutable(name);
7351 proplist->begin()->attributes = copyPropertyAttributeString(attrs, count);
7353 rwe->properties.attachLists(&proplist, 1);
7360 class_addProperty(Class cls, const char *name,
7361 const objc_property_attribute_t *attrs, unsigned int n)
7363 return _class_addProperty(cls, name, attrs, n, NO);
7367 class_replaceProperty(Class cls, const char *name,
7368 const objc_property_attribute_t *attrs, unsigned int n)
7370 _class_addProperty(cls, name, attrs, n, YES);
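// Illustrative sketch (public API, not part of the runtime): building an
// attribute list for class_addProperty()/class_replaceProperty(). The
// property and ivar names are hypothetical.
#if 0
#include <objc/runtime.h>

static void addNameProperty(Class cls)
{
    // T = type encoding, C = copy, N = nonatomic, V = backing ivar name.
    objc_property_attribute_t attrs[] = {
        { "T", "@\"NSString\"" },
        { "C", "" },
        { "N", "" },
        { "V", "_name" },
    };
    if (!class_addProperty(cls, "name", attrs, 4)) {
        // Already present; overwrite the attribute string instead.
        class_replaceProperty(cls, "name", attrs, 4);
    }
}
#endif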
7374 /***********************************************************************
7376 * Look up a class by name, and realize it.
7377 * Locking: acquires runtimeLock
7378 **********************************************************************/
7379 static BOOL empty_getClass(const char *name, Class *outClass)
7385 static ChainedHookFunction<objc_hook_getClass> GetClassHook{empty_getClass};
7387 void objc_setHook_getClass(objc_hook_getClass newValue,
7388 objc_hook_getClass *outOldValue)
7390 GetClassHook.set(newValue, outOldValue);
7394 look_up_class(const char *name,
7395 bool includeUnconnected __attribute__((unused)),
7396 bool includeClassHandler __attribute__((unused)))
7398 if (!name) return nil;
7404 result = getClassExceptSomeSwift(name);
7405 unrealized = result && !result->isRealized();
7407 result = realizeClassMaybeSwiftAndUnlock(result, runtimeLock);
7408 // runtimeLock is now unlocked
7410 runtimeLock.unlock();
7415 // Ask Swift about its un-instantiated classes.
7417 // We use thread-local storage to prevent infinite recursion
7418 // if the hook function provokes another lookup of the same name
7419 // (for example, if the hook calls objc_allocateClassPair)
7421 auto *tls = _objc_fetch_pthread_data(true);
7423 // Stop if this thread is already looking up this name.
7424 for (unsigned i = 0; i < tls->classNameLookupsUsed; i++) {
7425 if (0 == strcmp(name, tls->classNameLookups[i])) {
7430 // Save this lookup in tls.
7431 if (tls->classNameLookupsUsed == tls->classNameLookupsAllocated) {
7432 tls->classNameLookupsAllocated =
7433 (tls->classNameLookupsAllocated * 2 ?: 1);
7434 size_t size = tls->classNameLookupsAllocated *
7435 sizeof(tls->classNameLookups[0]);
7436 tls->classNameLookups = (const char **)
7437 realloc(tls->classNameLookups, size);
7439 tls->classNameLookups[tls->classNameLookupsUsed++] = name;
7442 Class swiftcls = nil;
7443 if (GetClassHook.get()(name, &swiftcls)) {
7444 ASSERT(swiftcls->isRealized());
7448 // Erase the name from tls.
7449 unsigned slot = --tls->classNameLookupsUsed;
7450 ASSERT(slot >= 0 && slot < tls->classNameLookupsAllocated);
7451 ASSERT(name == tls->classNameLookups[slot]);
7452 tls->classNameLookups[slot] = nil;
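// Illustrative sketch (public API, not part of the runtime): in this
// runtime the public lookups objc_getClass() and objc_lookUpClass(),
// defined elsewhere, are thin wrappers around look_up_class() above;
// only objc_getClass() is documented as willing to consult external
// resolution such as the getClass hook.
#if 0
#include <objc/runtime.h>

static Class findClass(const char *name)
{
    Class cls = objc_lookUpClass(name);   // quiet lookup
    if (!cls) cls = objc_getClass(name);  // may invoke registered hooks
    return cls;
}
#endif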
7459 /***********************************************************************
7460 * objc_duplicateClass
7462 * Locking: acquires runtimeLock
7463 **********************************************************************/
7465 objc_duplicateClass(Class original, const char *name,
7470 mutex_locker_t lock(runtimeLock);
7472 checkIsKnownClass(original);
7474 auto orig_rw = original->data();
7475 auto orig_rwe = orig_rw->ext();
7476 auto orig_ro = orig_rw->ro();
7478 ASSERT(original->isRealized());
7479 ASSERT(!original->isMetaClass());
7481 duplicate = alloc_class_for_subclass(original, extraBytes);
7483 duplicate->initClassIsa(original->ISA());
7484 duplicate->setSuperclass(original->getSuperclass());
7486 duplicate->cache.initializeToEmpty();
7488 class_rw_t *rw = objc::zalloc<class_rw_t>();
7489 rw->flags = (orig_rw->flags | RW_COPIED_RO | RW_REALIZING);
7490 rw->firstSubclass = nil;
7491 rw->nextSiblingClass = nil;
7493 duplicate->bits = original->bits;
7494 duplicate->setData(rw);
7496 auto ro = orig_ro->duplicate();
7497 *(char **)&ro->name = strdupIfMutable(name);
7501 auto rwe = rw->extAllocIfNeeded();
7502 rwe->version = orig_rwe->version;
7503 orig_rwe->methods.duplicateInto(rwe->methods);
7505 // fixme dies when categories are added to the base
7506 rwe->properties = orig_rwe->properties;
7507 rwe->protocols = orig_rwe->protocols;
7508 } else if (ro->baseMethods()) {
7509 // if we have base methods, we need to make a deep copy
7510 // which requires a class_rw_ext_t to be allocated
7514 duplicate->chooseClassArrayIndex();
7516 if (duplicate->getSuperclass()) {
7517 addSubclass(duplicate->getSuperclass(), duplicate);
7518 // duplicate->isa == original->isa so don't addSubclass() for it
7520 addRootClass(duplicate);
7523 // Don't methodize class - construction above is correct
7525 addNamedClass(duplicate, ro->getName());
7526 addClassTableEntry(duplicate, /*addMeta=*/false);
7528 if (PrintConnecting) {
7529 _objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p",
7530 name, original->nameForLogging(), (void*)duplicate, ro);
7533 duplicate->clearInfo(RW_REALIZING);
7538 /***********************************************************************
7539 * objc_initializeClassPair
7540 * Locking: runtimeLock must be write-locked by the caller
7541 **********************************************************************/
7543 // &UnsetLayout is the default ivar layout during class construction
7544 static const uint8_t UnsetLayout = 0;
7546 static void objc_initializeClassPair_internal(Class superclass, const char *name, Class cls, Class meta)
7548 runtimeLock.assertLocked();
7550 class_ro_t *cls_ro_w, *meta_ro_w;
7551 class_rw_t *cls_rw_w, *meta_rw_w;
7553 cls_rw_w = objc::zalloc<class_rw_t>();
7554 meta_rw_w = objc::zalloc<class_rw_t>();
7555 cls_ro_w = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
7556 meta_ro_w = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
7558 cls->setData(cls_rw_w);
7559 cls_rw_w->set_ro(cls_ro_w);
7560 meta->setData(meta_rw_w);
7561 meta_rw_w->set_ro(meta_ro_w);
7565 cls_rw_w->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
7566 meta_rw_w->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING | RW_META;
7568 cls_ro_w->flags = 0;
7569 meta_ro_w->flags = RO_META;
7571 uint32_t flagsToCopy = RW_FORBIDS_ASSOCIATED_OBJECTS;
7572 cls_rw_w->flags |= superclass->data()->flags & flagsToCopy;
7573 cls_ro_w->instanceStart = superclass->unalignedInstanceSize();
7574 meta_ro_w->instanceStart = superclass->ISA()->unalignedInstanceSize();
7575 cls->setInstanceSize(cls_ro_w->instanceStart);
7576 meta->setInstanceSize(meta_ro_w->instanceStart);
7578 cls_ro_w->flags |= RO_ROOT;
7579 meta_ro_w->flags |= RO_ROOT;
7580 cls_ro_w->instanceStart = 0;
7581 meta_ro_w->instanceStart = (uint32_t)sizeof(objc_class);
7582 cls->setInstanceSize((uint32_t)sizeof(id)); // just an isa
7583 meta->setInstanceSize(meta_ro_w->instanceStart);
7586 cls_ro_w->name.store(strdupIfMutable(name), std::memory_order_release);
7587 meta_ro_w->name.store(strdupIfMutable(name), std::memory_order_release);
7589 cls_ro_w->ivarLayout = &UnsetLayout;
7590 cls_ro_w->weakIvarLayout = &UnsetLayout;
7592 meta->chooseClassArrayIndex();
7593 cls->chooseClassArrayIndex();
7595 // This absolutely needs to be done before addSubclass
7596 // as initializeToEmpty() clobbers the FAST_CACHE bits
7597 cls->cache.initializeToEmpty();
7598 meta->cache.initializeToEmpty();
7601 meta->cache.setBit(FAST_CACHE_META);
7603 meta->setInstancesRequireRawIsa();
7605 // Connect to superclasses and metaclasses
7606 cls->initClassIsa(meta);
7609 meta->initClassIsa(superclass->ISA()->ISA());
7610 cls->setSuperclass(superclass);
7611 meta->setSuperclass(superclass->ISA());
7612 addSubclass(superclass, cls);
7613 addSubclass(superclass->ISA(), meta);
7615 meta->initClassIsa(meta);
7616 cls->setSuperclass(Nil);
7617 meta->setSuperclass(cls);
7619 addSubclass(cls, meta);
7622 addClassTableEntry(cls);
7626 /***********************************************************************
7628 * Sanity-check the superclass provided to
7629 * objc_allocateClassPair, objc_initializeClassPair, or objc_readClassPair.
7630 **********************************************************************/
7632 verifySuperclass(Class superclass, bool rootOK)
7635 // Superclass does not exist.
7636 // If subclass may be a root class, this is OK.
7637 // If subclass must not be a root class, this is bad.
7641 // Superclass must be realized.
7642 if (! superclass->isRealized()) return false;
7644 // Superclass must not be under construction.
7645 if (superclass->data()->flags & RW_CONSTRUCTING) return false;
7651 /***********************************************************************
7652 * objc_initializeClassPair
7653 **********************************************************************/
7654 Class objc_initializeClassPair(Class superclass, const char *name, Class cls, Class meta)
7656 // Fail if the class name is in use.
7657 if (look_up_class(name, NO, NO)) return nil;
7659 mutex_locker_t lock(runtimeLock);
7661 // Fail if the class name is in use.
7662 // Fail if the superclass isn't kosher.
7663 if (getClassExceptSomeSwift(name) ||
7664 !verifySuperclass(superclass, true/*rootOK*/))
7669 objc_initializeClassPair_internal(superclass, name, cls, meta);
7675 /***********************************************************************
7676 * objc_allocateClassPair
7678 * Locking: acquires runtimeLock
7679 **********************************************************************/
7680 Class objc_allocateClassPair(Class superclass, const char *name,
7685 // Fail if the class name is in use.
7686 if (look_up_class(name, NO, NO)) return nil;
7688 mutex_locker_t lock(runtimeLock);
7690 // Fail if the class name is in use.
7691 // Fail if the superclass isn't kosher.
7692 if (getClassExceptSomeSwift(name) ||
7693 !verifySuperclass(superclass, true/*rootOK*/))
7698 // Allocate new classes.
7699 cls = alloc_class_for_subclass(superclass, extraBytes);
7700 meta = alloc_class_for_subclass(superclass, extraBytes);
7702 // fixme mangle the name if it looks swift-y?
7703 objc_initializeClassPair_internal(superclass, name, cls, meta);
7709 /***********************************************************************
7710 * objc_registerClassPair
7712 * Locking: acquires runtimeLock
7713 **********************************************************************/
7714 void objc_registerClassPair(Class cls)
7716 mutex_locker_t lock(runtimeLock);
7718 checkIsKnownClass(cls);
7720 if ((cls->data()->flags & RW_CONSTRUCTED) ||
7721 (cls->ISA()->data()->flags & RW_CONSTRUCTED))
7723 _objc_inform("objc_registerClassPair: class '%s' was already "
7724 "registered!", cls->data()->ro()->getName());
7728 if (!(cls->data()->flags & RW_CONSTRUCTING) ||
7729 !(cls->ISA()->data()->flags & RW_CONSTRUCTING))
7731 _objc_inform("objc_registerClassPair: class '%s' was not "
7732 "allocated with objc_allocateClassPair!",
7733 cls->data()->ro()->getName());
7737 // Clear "under construction" bit, set "done constructing" bit
7738 cls->ISA()->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
7739 cls->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
7741 // Add to named class table.
7742 addNamedClass(cls, cls->data()->ro()->getName());
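// Illustrative sketch (public API, not part of the runtime): the full
// class-pair lifecycle around objc_registerClassPair(). The class name,
// selector, and function are hypothetical; assumes NSObject is available.
#if 0
#include <objc/runtime.h>
#include <objc/message.h>

static id HypotheticalGreeting(id self, SEL _cmd)
{
    return nil;  // placeholder implementation
}

static void classPairLifecycle(void)
{
    // 1. Allocate: the pair exists but is still "under construction".
    Class cls = objc_allocateClassPair(objc_getClass("NSObject"), "MyRuntimeDemo", 0);
    if (!cls) return;   // name already in use

    // 2. Customize while RW_CONSTRUCTING is set: methods, ivars, protocols.
    class_addMethod(cls, sel_registerName("greeting"),
                    (IMP)HypotheticalGreeting, "@@:");

    // 3. Register: flips CONSTRUCTING -> CONSTRUCTED and publishes the name.
    objc_registerClassPair(cls);

    // 4. Use it like any other class.
    id obj = class_createInstance(cls, 0);
    ((id (*)(id, SEL))objc_msgSend)(obj, sel_registerName("greeting"));
    object_dispose(obj);

    // 5. Dispose only once no instances or subclasses remain.
    objc_disposeClassPair(cls);
}
#endif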
7746 /***********************************************************************
7747 * objc_readClassPair()
7748 * Read a class and metaclass as written by a compiler.
7749 * Assumes the class and metaclass are not referenced by other things
7750 * that might need to be fixed up (such as categories and subclasses).
7751 * Does not call +load.
7752 * Returns the class pointer, or nil.
7754 * Locking: runtimeLock acquired by map_images
7755 **********************************************************************/
7756 Class objc_readClassPair(Class bits, const struct objc_image_info *info)
7758 mutex_locker_t lock(runtimeLock);
7760 // No info bits are significant yet.
7763 // Fail if the superclass isn't kosher.
7764 bool rootOK = bits->data()->flags & RO_ROOT;
7765 if (!verifySuperclass(bits->getSuperclass(), rootOK)){
7769 // Duplicate classes are allowed, just like they are for image loading.
7770 // readClass will complain about the duplicate.
7772 Class cls = readClass(bits, false/*bundle*/, false/*shared cache*/);
7774 // This function isn't allowed to remap anything.
7775 _objc_fatal("objc_readClassPair for class %s changed %p to %p",
7776 cls->nameForLogging(), bits, cls);
7779 // The only client of this function is old Swift.
7780 // Stable Swift won't use it.
7781 // fixme once Swift in the OS settles we can assert(!cls->isSwiftStable()).
7782 cls = realizeClassWithoutSwift(cls, nil);
7788 /***********************************************************************
7790 * Disconnect a class from other data structures.
7791 * Exception: does not remove the class from the +load list
7792 * Call this before free_class.
7793 * Locking: runtimeLock must be held by the caller.
7794 **********************************************************************/
7795 static void detach_class(Class cls, bool isMeta)
7797 runtimeLock.assertLocked();
7799 // categories not yet attached to this class
7800 objc::unattachedCategories.eraseClass(cls);
7802 // superclass's subclass list
7803 if (cls->isRealized()) {
7804 Class supercls = cls->getSuperclass();
7806 removeSubclass(supercls, cls);
7808 removeRootClass(cls);
7812 // class tables and +load queue
7814 removeNamedClass(cls, cls->mangledName());
7816 objc::allocatedClasses.get().erase(cls);
7820 /***********************************************************************
7822 * Frees a class's data structures.
7823 * Call this after detach_class.
7824 * Locking: runtimeLock must be held by the caller
7825 **********************************************************************/
7826 static void free_class(Class cls)
7828 runtimeLock.assertLocked();
7830 if (! cls->isRealized()) return;
7832 auto rw = cls->data();
7833 auto rwe = rw->ext();
7836 cls->cache.destroy();
7839 for (auto& meth : rwe->methods) {
7840 try_free(meth.types());
7842 rwe->methods.tryFree();
7845 const ivar_list_t *ivars = ro->ivars;
7847 for (auto& ivar : *ivars) {
7848 try_free(ivar.offset);
7849 try_free(ivar.name);
7850 try_free(ivar.type);
7856 for (auto& prop : rwe->properties) {
7857 try_free(prop.name);
7858 try_free(prop.attributes);
7860 rwe->properties.tryFree();
7862 rwe->protocols.tryFree();
7865 try_free(ro->getIvarLayout());
7866 try_free(ro->weakIvarLayout);
7867 try_free(ro->getName());
7875 void objc_disposeClassPair(Class cls)
7877 mutex_locker_t lock(runtimeLock);
7879 checkIsKnownClass(cls);
7881 if (!(cls->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)) ||
7882 !(cls->ISA()->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)))
7884 // class not allocated with objc_allocateClassPair
7885 // disposing still-unregistered class is OK!
7886 _objc_inform("objc_disposeClassPair: class '%s' was not "
7887 "allocated with objc_allocateClassPair!",
7888 cls->data()->ro()->getName());
7892 if (cls->isMetaClass()) {
7893 _objc_inform("objc_disposeClassPair: class '%s' is a metaclass, "
7894 "not a class!", cls->data()->ro()->getName());
7898 // Shouldn't have any live subclasses.
7899 if (cls->data()->firstSubclass) {
7900 _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
7901 "including '%s'!", cls->data()->ro()->getName(),
7902 cls->data()->firstSubclass->nameForLogging());
7904 if (cls->ISA()->data()->firstSubclass) {
7905 _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
7906 "including '%s'!", cls->data()->ro()->getName(),
7907 cls->ISA()->data()->firstSubclass->nameForLogging());
7910 // don't remove_class_from_loadable_list()
7911 // - it's not there and we don't have the lock
7912 detach_class(cls->ISA(), YES);
7913 detach_class(cls, NO);
7914 free_class(cls->ISA());
7919 /***********************************************************************
7920 * objc_constructInstance
7921 * Creates an instance of `cls` at the location pointed to by `bytes`.
7922 * `bytes` must point to at least class_getInstanceSize(cls) bytes of
7923 * well-aligned zero-filled memory.
7924 * The new object's isa is set. Any C++ constructors are called.
7925 * Returns `bytes` if successful. Returns nil if `cls` or `bytes` is
7926 * nil, or if C++ constructors fail.
7927 * Note: class_createInstance() and class_createInstances() preflight this.
7928 **********************************************************************/
7930 objc_constructInstance(Class cls, void *bytes)
7932 if (!cls || !bytes) return nil;
7936 // Read class's info bits all at once for performance
7937 bool hasCxxCtor = cls->hasCxxCtor();
7938 bool hasCxxDtor = cls->hasCxxDtor();
7939 bool fast = cls->canAllocNonpointer();
7942 obj->initInstanceIsa(cls, hasCxxDtor);
7948 return object_cxxConstructFromClass(obj, cls, OBJECT_CONSTRUCT_NONE);
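// Illustrative sketch (public API, not part of the runtime): pairing
// objc_constructInstance() with objc_destructInstance() over caller-owned
// storage, as the contract above requires.
#if 0
#include <objc/runtime.h>
#include <stdlib.h>

static void constructInPlace(Class cls)
{
    // Well-aligned, zero-filled storage of at least the instance size.
    void *bytes = calloc(1, class_getInstanceSize(cls));
    id obj = objc_constructInstance(cls, bytes);
    if (obj) {
        // ... use obj ...
        objc_destructInstance(obj);   // C++ dtors, ARC ivars, associations
    }
    free(bytes);
}
#endif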
7955 /***********************************************************************
7956 * class_createInstance
7960 * Note: this function has been carefully written so that the fastpath
7962 **********************************************************************/
7963 static ALWAYS_INLINE id
7964 _class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone,
7965 int construct_flags = OBJECT_CONSTRUCT_NONE,
7966 bool cxxConstruct = true,
7967 size_t *outAllocatedSize = nil)
7969 ASSERT(cls->isRealized());
7971 // Read class's info bits all at once for performance
7972 bool hasCxxCtor = cxxConstruct && cls->hasCxxCtor();
7973 bool hasCxxDtor = cls->hasCxxDtor();
7974 bool fast = cls->canAllocNonpointer();
7977 size = cls->instanceSize(extraBytes);
7978 if (outAllocatedSize) *outAllocatedSize = size;
7982 obj = (id)malloc_zone_calloc((malloc_zone_t *)zone, 1, size);
7984 obj = (id)calloc(1, size);
7986 if (slowpath(!obj)) {
7987 if (construct_flags & OBJECT_CONSTRUCT_CALL_BADALLOC) {
7988 return _objc_callBadAllocHandler(cls);
7993 if (!zone && fast) {
7994 obj->initInstanceIsa(cls, hasCxxDtor);
7996 // Use raw pointer isa on the assumption that they might be
7997 // doing something weird with the zone or RR.
8001 if (fastpath(!hasCxxCtor)) {
8005 construct_flags |= OBJECT_CONSTRUCT_FREE_ONFAILURE;
8006 return object_cxxConstructFromClass(obj, cls, construct_flags);
8010 class_createInstance(Class cls, size_t extraBytes)
8012 if (!cls) return nil;
8013 return _class_createInstanceFromZone(cls, extraBytes, nil);
8018 _objc_rootAllocWithZone(Class cls, malloc_zone_t *zone __unused)
8020 // allocWithZone under __OBJC2__ ignores the zone parameter
8021 return _class_createInstanceFromZone(cls, 0, nil,
8022 OBJECT_CONSTRUCT_CALL_BADALLOC);
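// Illustrative sketch (public API, not part of the runtime): the
// extraBytes parameter used throughout _class_createInstanceFromZone()
// is the same one exposed by class_createInstance(); the extra space
// sits after the declared ivars and is reached with
// object_getIndexedIvars().
#if 0
#include <objc/runtime.h>
#include <string.h>

static id createWithTrailingStorage(Class cls, const void *blob, size_t blobSize)
{
    id obj = class_createInstance(cls, blobSize);
    if (obj) {
        memcpy(object_getIndexedIvars(obj), blob, blobSize);
    }
    return obj;
}
#endif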
8025 /***********************************************************************
8026 * class_createInstances
8029 **********************************************************************/
8030 #if SUPPORT_NONPOINTER_ISA
8031 #warning fixme optimize class_createInstances
8034 class_createInstances(Class cls, size_t extraBytes,
8035 id *results, unsigned num_requested)
8037 return _class_createInstancesFromZone(cls, extraBytes, nil,
8038 results, num_requested);
/***********************************************************************
* object_copyFromZone
**********************************************************************/
static id
_object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
{
    if (oldObj->isTaggedPointerOrNil()) return oldObj;

    // fixme this doesn't handle C++ ivars correctly (#4619414)

    Class cls = oldObj->ISA(/*authenticated*/true);
    size_t size;
    id obj = _class_createInstanceFromZone(cls, extraBytes, zone,
                                           OBJECT_CONSTRUCT_NONE, false, &size);
    if (!obj) return nil;

    // Copy everything except the isa, which was already set above.
    uint8_t *copyDst = (uint8_t *)obj + sizeof(Class);
    uint8_t *copySrc = (uint8_t *)oldObj + sizeof(Class);
    size_t copySize = size - sizeof(Class);
    memmove(copyDst, copySrc, copySize);

    fixupCopiedIvars(obj, oldObj);

    return obj;
}
/***********************************************************************
* object_copy
**********************************************************************/
id
object_copy(id oldObj, size_t extraBytes)
{
    return _object_copyFromZone(oldObj, extraBytes, malloc_default_zone());
}
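// Illustrative sketch (not compiled): object_copy() returns a new instance whose
// ivar bytes match the original; the isa is initialized separately, as shown in
// _object_copyFromZone above. Note the fixme there: C++ ivars are copied bitwise
// rather than through their copy constructors. Assumes non-ARC callers.
#if 0
static id shallow_clone(id original)
{
    // nil and tagged pointers come back unchanged; they have no ivar storage to copy.
    return object_copy(original, 0);
}
#endif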
/***********************************************************************
* class_createInstanceFromZone
**********************************************************************/
id
class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
{
    if (!cls) return nil;
    return _class_createInstanceFromZone(cls, extraBytes, zone);
}
/***********************************************************************
* object_copyFromZone
**********************************************************************/
id
object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
{
    return _object_copyFromZone(oldObj, extraBytes, zone);
}
/***********************************************************************
* objc_destructInstance
* Destroys an instance without freeing memory.
* Calls C++ destructors.
* Calls ARC ivar cleanup.
* Removes associative references.
* Returns `obj`. Does nothing if `obj` is nil.
**********************************************************************/
void *objc_destructInstance(id obj)
{
    if (obj) {
        // Read all of the flags at once for performance.
        bool cxx = obj->hasCxxDtor();
        bool assoc = obj->hasAssociatedObjects();

        // This order is important.
        if (cxx) object_cxxDestruct(obj);
        if (assoc) _object_remove_assocations(obj, /*deallocating*/true);
        obj->clearDeallocating();
    }

    return obj;
}


/***********************************************************************
* object_dispose
**********************************************************************/
id
object_dispose(id obj)
{
    if (!obj) return nil;

    objc_destructInstance(obj);
    free(obj);

    return nil;
}
/***********************************************************************
* _objc_getFreedObjectClass
**********************************************************************/
Class _objc_getFreedObjectClass (void)
{
    return nil;
}
/***********************************************************************
* Tagged pointer objects.
*
* Tagged pointer objects store the class and the object value in the
* object pointer; the "pointer" does not actually point to anything.
*
* Tagged pointer objects currently use this representation:
* (LSB)
*  1 bit   set if tagged, clear if ordinary object pointer
*  3 bits  tag index
* 60 bits  payload
* (MSB)
* The tag index defines the object's class.
* The payload format is defined by the object's class.
*
* If the tag index is 0b111, the tagged pointer object uses an
* "extended" representation, allowing more classes but with smaller payloads:
* (LSB)
*  1 bit   set if tagged, clear if ordinary object pointer
*  3 bits  0b111
*  8 bits  extended tag index
* 52 bits  payload
* (MSB)
*
* Some architectures reverse the MSB and LSB in these representations.
*
* This representation is subject to change. Representation-agnostic SPI is:
* objc-internal.h for class implementers.
* objc-gdb.h for debuggers.
**********************************************************************/
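// Illustrative sketch (not compiled): one way a debugger-style tool could apply
// the objc_debug_taggedpointer_* variables below to the layout described above.
// This is a simplified reading that ignores the OBJC_SPLIT_TAGGED_POINTERS
// permutation; the authoritative decoders are the inlines in objc-internal.h.
#if 0
static bool debug_isTaggedPointer(uintptr_t ptr)
{
    // The mask selects the is-tagged bit(s).
    return (ptr & objc_debug_taggedpointer_mask) == objc_debug_taggedpointer_mask;
}

static unsigned debug_basicTagSlot(uintptr_t ptr)
{
    // Remove the obfuscation, then extract the slot (tag index plus tagged bit),
    // which indexes objc_debug_taggedpointer_classes.
    uintptr_t value = ptr ^ objc_debug_taggedpointer_obfuscator;
    return (unsigned)((value >> objc_debug_taggedpointer_slot_shift)
                      & objc_debug_taggedpointer_slot_mask);
}

static uintptr_t debug_basicTagPayload(uintptr_t ptr)
{
    // Remove the obfuscation, then shift away the non-payload bits.
    uintptr_t value = ptr ^ objc_debug_taggedpointer_obfuscator;
    value <<= objc_debug_taggedpointer_payload_lshift;
    value >>= objc_debug_taggedpointer_payload_rshift;
    return value;
}
#endif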
#if !SUPPORT_TAGGED_POINTERS

// These variables are always provided for debuggers.
uintptr_t objc_debug_taggedpointer_obfuscator = 0;
uintptr_t objc_debug_taggedpointer_mask = 0;
unsigned  objc_debug_taggedpointer_slot_shift = 0;
uintptr_t objc_debug_taggedpointer_slot_mask = 0;
unsigned  objc_debug_taggedpointer_payload_lshift = 0;
unsigned  objc_debug_taggedpointer_payload_rshift = 0;
Class objc_debug_taggedpointer_classes[1] = { nil };

uintptr_t objc_debug_taggedpointer_ext_mask = 0;
unsigned  objc_debug_taggedpointer_ext_slot_shift = 0;
uintptr_t objc_debug_taggedpointer_ext_slot_mask = 0;
unsigned  objc_debug_taggedpointer_ext_payload_lshift = 0;
unsigned  objc_debug_taggedpointer_ext_payload_rshift = 0;
Class objc_debug_taggedpointer_ext_classes[1] = { nil };

uintptr_t objc_debug_constant_cfstring_tag_bits = 0;

static void
disableTaggedPointers() { }

static void
initializeTaggedPointerObfuscator(void) { }

#else
// The "slot" used in the class table and given to the debugger
// includes the is-tagged bit. This makes objc_msgSend faster.
// The "ext" representation doesn't do that.

uintptr_t objc_debug_taggedpointer_obfuscator;
uintptr_t objc_debug_taggedpointer_mask = _OBJC_TAG_MASK;
unsigned  objc_debug_taggedpointer_slot_shift = _OBJC_TAG_SLOT_SHIFT;
uintptr_t objc_debug_taggedpointer_slot_mask = _OBJC_TAG_SLOT_MASK;
unsigned  objc_debug_taggedpointer_payload_lshift = _OBJC_TAG_PAYLOAD_LSHIFT;
unsigned  objc_debug_taggedpointer_payload_rshift = _OBJC_TAG_PAYLOAD_RSHIFT;
// objc_debug_taggedpointer_classes is defined in objc-msg-*.s

uintptr_t objc_debug_taggedpointer_ext_mask = _OBJC_TAG_EXT_MASK;
unsigned  objc_debug_taggedpointer_ext_slot_shift = _OBJC_TAG_EXT_SLOT_SHIFT;
uintptr_t objc_debug_taggedpointer_ext_slot_mask = _OBJC_TAG_EXT_SLOT_MASK;
unsigned  objc_debug_taggedpointer_ext_payload_lshift = _OBJC_TAG_EXT_PAYLOAD_LSHIFT;
unsigned  objc_debug_taggedpointer_ext_payload_rshift = _OBJC_TAG_EXT_PAYLOAD_RSHIFT;
// objc_debug_taggedpointer_ext_classes is defined in objc-msg-*.s

#if OBJC_SPLIT_TAGGED_POINTERS
uint8_t objc_debug_tag60_permutations[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
uintptr_t objc_debug_constant_cfstring_tag_bits = _OBJC_TAG_EXT_MASK | ((uintptr_t)(OBJC_TAG_Constant_CFString - OBJC_TAG_First52BitPayload) << _OBJC_TAG_EXT_SLOT_SHIFT);
#else
uintptr_t objc_debug_constant_cfstring_tag_bits = 0;
#endif
static void
disableTaggedPointers()
{
    objc_debug_taggedpointer_mask = 0;
    objc_debug_taggedpointer_slot_shift = 0;
    objc_debug_taggedpointer_slot_mask = 0;
    objc_debug_taggedpointer_payload_lshift = 0;
    objc_debug_taggedpointer_payload_rshift = 0;

    objc_debug_taggedpointer_ext_mask = 0;
    objc_debug_taggedpointer_ext_slot_shift = 0;
    objc_debug_taggedpointer_ext_slot_mask = 0;
    objc_debug_taggedpointer_ext_payload_lshift = 0;
    objc_debug_taggedpointer_ext_payload_rshift = 0;
}
// Returns a pointer to the class's storage in the tagged class arrays.
// Assumes the tag is a valid basic tag.
static Class *
classSlotForBasicTagIndex(objc_tag_index_t tag)
{
#if OBJC_SPLIT_TAGGED_POINTERS
    uintptr_t obfuscatedTag = _objc_basicTagToObfuscatedTag(tag);
    return &objc_tag_classes[obfuscatedTag];
#else
    uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
                                >> _OBJC_TAG_INDEX_SHIFT)
                               & _OBJC_TAG_INDEX_MASK);
    uintptr_t obfuscatedTag = tag ^ tagObfuscator;

    // Array index in objc_tag_classes includes the tagged bit itself
# if SUPPORT_MSB_TAGGED_POINTERS
    return &objc_tag_classes[0x8 | obfuscatedTag];
# else
    return &objc_tag_classes[(obfuscatedTag << 1) | 1];
# endif
#endif
}
// Returns a pointer to the class's storage in the tagged class arrays,
// or nil if the tag is out of range.
static Class *
classSlotForTagIndex(objc_tag_index_t tag)
{
    if (tag >= OBJC_TAG_First60BitPayload && tag <= OBJC_TAG_Last60BitPayload) {
        return classSlotForBasicTagIndex(tag);
    }

    if (tag >= OBJC_TAG_First52BitPayload && tag <= OBJC_TAG_Last52BitPayload) {
        int index = tag - OBJC_TAG_First52BitPayload;
#if OBJC_SPLIT_TAGGED_POINTERS
        if (tag >= OBJC_TAG_FirstUnobfuscatedSplitTag)
            return &objc_tag_ext_classes[index];
#endif
        uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
                                    >> _OBJC_TAG_EXT_INDEX_SHIFT)
                                   & _OBJC_TAG_EXT_INDEX_MASK);
        return &objc_tag_ext_classes[index ^ tagObfuscator];
    }

    return nil;
}
/***********************************************************************
* initializeTaggedPointerObfuscator
* Initialize objc_debug_taggedpointer_obfuscator with randomness.
*
* The tagged pointer obfuscator is intended to make it more difficult
* for an attacker to construct a particular object as a tagged pointer,
* in the presence of a buffer overflow or other write control over some
* memory. The obfuscator is XORed with the tagged pointers when setting
* or retrieving payload values. It is filled with randomness on first
* use.
**********************************************************************/
static void
initializeTaggedPointerObfuscator(void)
{
    if (!DisableTaggedPointerObfuscation && dyld_program_sdk_at_least(dyld_fall_2018_os_versions)) {
        // Pull random data into the variable, then shift away all non-payload bits.
        arc4random_buf(&objc_debug_taggedpointer_obfuscator,
                       sizeof(objc_debug_taggedpointer_obfuscator));
        objc_debug_taggedpointer_obfuscator &= ~_OBJC_TAG_MASK;

#if OBJC_SPLIT_TAGGED_POINTERS
        // The obfuscator doesn't apply to any of the extended tag mask or the no-obfuscation bit.
        objc_debug_taggedpointer_obfuscator &= ~(_OBJC_TAG_EXT_MASK | _OBJC_TAG_NO_OBFUSCATION_MASK);

        // Shuffle the first seven entries of the tag permutator.
        int max = 7;
        for (int i = max - 1; i >= 0; i--) {
            int target = arc4random_uniform(i + 1);
            swap(objc_debug_tag60_permutations[i],
                 objc_debug_tag60_permutations[target]);
        }
#endif
    } else {
        // Set the obfuscator to zero for apps linked against older SDKs,
        // in case they're relying on the tagged pointer representation.
        objc_debug_taggedpointer_obfuscator = 0;
    }
}
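// Illustrative sketch (not compiled): the obfuscator is applied symmetrically, so
// encoding and decoding are the same XOR. This is a simplified model of the
// inline coders in objc-internal.h and ignores the split-tagged permutation.
#if 0
static uintptr_t example_obfuscate(uintptr_t raw)
{
    // Applied when a tagged pointer is created.
    return raw ^ objc_debug_taggedpointer_obfuscator;
}

static uintptr_t example_deobfuscate(uintptr_t stored)
{
    // Applied when the tag or payload is read back; XOR is its own inverse.
    return stored ^ objc_debug_taggedpointer_obfuscator;
}
#endif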
/***********************************************************************
* _objc_registerTaggedPointerClass
* Set the class to use for the given tagged pointer index.
* Aborts if the tag is out of range, or if the tag is already
* used by some other class.
**********************************************************************/
void
_objc_registerTaggedPointerClass(objc_tag_index_t tag, Class cls)
{
    if (objc_debug_taggedpointer_mask == 0) {
        _objc_fatal("tagged pointers are disabled");
    }

    Class *slot = classSlotForTagIndex(tag);
    if (!slot) {
        _objc_fatal("tag index %u is invalid", (unsigned int)tag);
    }

    Class oldCls = *slot;

    if (cls  &&  oldCls  &&  cls != oldCls) {
        _objc_fatal("tag index %u used for two different classes "
                    "(was %p %s, now %p %s)", tag,
                    oldCls, oldCls->nameForLogging(),
                    cls, cls->nameForLogging());
    }

    *slot = cls;

    // Store a placeholder class in the basic tag slot that is
    // reserved for the extended tag space, if it isn't set already.
    // Do this lazily when the first extended tag is registered so
    // that old debuggers characterize bogus pointers correctly more often.
    if (tag < OBJC_TAG_First60BitPayload || tag > OBJC_TAG_Last60BitPayload) {
        Class *extSlot = classSlotForBasicTagIndex(OBJC_TAG_RESERVED_7);
        if (*extSlot == nil) {
            extern objc_class OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
            *extSlot = (Class)&OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
        }
    }
}
/***********************************************************************
* _objc_getClassForTag
* Returns the class that is using the given tagged pointer tag.
* Returns nil if no class is using that tag or the tag is out of range.
**********************************************************************/
Class
_objc_getClassForTag(objc_tag_index_t tag)
{
    Class *slot = classSlotForTagIndex(tag);
    if (slot) return *slot;
    else return nil;
}
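// Illustrative sketch (not compiled): how a Foundation-like client registers a
// tagged pointer class and reads the registration back. OBJC_TAG_NSString is a
// real tag constant from objc-internal.h; the class name is a placeholder, and
// this SPI is reserved for the OS frameworks.
#if 0
static void register_example(void)
{
    Class cls = objc_getClass("ExampleTaggedString");
    _objc_registerTaggedPointerClass(OBJC_TAG_NSString, cls);

    // The registration can be read back; nil means the tag is unused or invalid.
    Class registered = _objc_getClassForTag(OBJC_TAG_NSString);
    // registered == cls here
    (void)registered;
}
#endif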
OBJC_EXTERN void objc_msgSend_fixup(void);
OBJC_EXTERN void objc_msgSendSuper2_fixup(void);
OBJC_EXTERN void objc_msgSend_stret_fixup(void);
OBJC_EXTERN void objc_msgSendSuper2_stret_fixup(void);
#if defined(__i386__) || defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fpret_fixup(void);
#endif
#if defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fp2ret_fixup(void);
#endif

OBJC_EXTERN void objc_msgSend_fixedup(void);
OBJC_EXTERN void objc_msgSendSuper2_fixedup(void);
OBJC_EXTERN void objc_msgSend_stret_fixedup(void);
OBJC_EXTERN void objc_msgSendSuper2_stret_fixedup(void);
#if defined(__i386__) || defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fpret_fixedup(void);
#endif
#if defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fp2ret_fixedup(void);
#endif
/***********************************************************************
* fixupMessageRef
* Repairs an old vtable dispatch call site.
* vtable dispatch itself is not supported.
**********************************************************************/
static void
fixupMessageRef(message_ref_t *msg)
{
    msg->sel = sel_registerName((const char *)msg->sel);

    if (msg->imp == &objc_msgSend_fixup) {
        if (msg->sel == @selector(alloc)) {
            msg->imp = (IMP)&objc_alloc;
        } else if (msg->sel == @selector(allocWithZone:)) {
            msg->imp = (IMP)&objc_allocWithZone;
        } else if (msg->sel == @selector(retain)) {
            msg->imp = (IMP)&objc_retain;
        } else if (msg->sel == @selector(release)) {
            msg->imp = (IMP)&objc_release;
        } else if (msg->sel == @selector(autorelease)) {
            msg->imp = (IMP)&objc_autorelease;
        } else {
            msg->imp = &objc_msgSend_fixedup;
        }
    }
    else if (msg->imp == &objc_msgSendSuper2_fixup) {
        msg->imp = &objc_msgSendSuper2_fixedup;
    }
    else if (msg->imp == &objc_msgSend_stret_fixup) {
        msg->imp = &objc_msgSend_stret_fixedup;
    }
    else if (msg->imp == &objc_msgSendSuper2_stret_fixup) {
        msg->imp = &objc_msgSendSuper2_stret_fixedup;
    }
#if defined(__i386__) || defined(__x86_64__)
    else if (msg->imp == &objc_msgSend_fpret_fixup) {
        msg->imp = &objc_msgSend_fpret_fixedup;
    }
#endif
#if defined(__x86_64__)
    else if (msg->imp == &objc_msgSend_fp2ret_fixup) {
        msg->imp = &objc_msgSend_fp2ret_fixedup;
    }
#endif
}
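// Illustrative sketch (not compiled): the image loader applies fixupMessageRef()
// to every message_ref_t recorded by old compilers (conceptually, the entries of
// an image's __objc_msgrefs section). The refs/count parameters stand in for the
// real objc-file.h accessors; only the shape of the loop matters here.
#if 0
static void fixup_message_refs_example(message_ref_t *refs, size_t count)
{
    for (size_t i = 0; i < count; i++) {
        // Registers the selector and retargets the IMP, as above.
        fixupMessageRef(refs + i);
    }
}
#endif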
static Class setSuperclass(Class cls, Class newSuper)
{
    Class oldSuper;

    runtimeLock.assertLocked();

    ASSERT(cls->isRealized());
    ASSERT(newSuper->isRealized());

    oldSuper = cls->getSuperclass();
    removeSubclass(oldSuper, cls);
    removeSubclass(oldSuper->ISA(), cls->ISA());

    cls->setSuperclass(newSuper);
    cls->ISA()->setSuperclass(newSuper->ISA(/*authenticated*/true));
    addSubclass(newSuper, cls);
    addSubclass(newSuper->ISA(), cls->ISA());

    // Flush subclass's method caches.
    flushCaches(cls, __func__, [](Class c){ return true; });
    flushCaches(cls->ISA(), __func__, [](Class c){ return true; });

    return oldSuper;
}


Class class_setSuperclass(Class cls, Class newSuper)
{
    mutex_locker_t lock(runtimeLock);
    return setSuperclass(cls, newSuper);
}
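// Illustrative sketch (not compiled): class_setSuperclass() is public but
// long-deprecated API; it rewires the class and its metaclass and flushes the
// affected caches, as shown above. The class names are made up.
#if 0
static void reparent_example(void)
{
    Class base  = objc_getClass("ExampleBase");
    Class other = objc_getClass("ExampleOtherBase");

    // Create a class under `base`, then move it under `other`.
    Class sub = objc_allocateClassPair(base, "ExampleSub", 0);
    objc_registerClassPair(sub);

    Class old = class_setSuperclass(sub, other);
    // old == base here, per setSuperclass() above
    (void)old;
}
#endif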
void runtime_init(void)
{
    objc::unattachedCategories.init(32);
    objc::allocatedClasses.init();
}