+/***********************************************************************
+* unreasonableClassCount
+* Provides an upper bound for any iteration of classes,
+* to prevent spins when runtime metadata is corrupted.
+**********************************************************************/
+static unsigned unreasonableClassCount()
+{
+ runtimeLock.assertLocked();
+
+ int base = NXCountMapTable(gdb_objc_realized_classes) +
+ getPreoptimizedClassUnreasonableCount();
+
+ // Provide lots of slack here. Some iterations touch metaclasses too.
+ // Some iterations backtrack (like realized class iteration).
+ // We don't need an efficient bound, merely one that prevents spins.
+ return (base + 1) * 16;
+}
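+
+// Illustrative sketch of the intended pattern: an iterator seeds a budget from
+// unreasonableClassCount() and treats exhausting it as corruption, e.g.
+//
+//   unsigned budget = unreasonableClassCount();
+//   while (/* walking some class list */) {
+//       if (--budget == 0) _objc_fatal("Memory corruption in class list.");
+//   }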
+
+
+/***********************************************************************
+* Class enumerators
+* The passed-in block returns `false` if subclasses can be skipped.
+* Locking: runtimeLock must be held by the caller.
+**********************************************************************/
+static inline void
+foreach_realized_class_and_subclass_2(Class top, unsigned &count,
+ bool skip_metaclass,
+ bool (^code)(Class) __attribute((noescape)))
+{
+ Class cls = top;
+
+ runtimeLock.assertLocked();
+ ASSERT(top);
+
+ while (1) {
+ if (--count == 0) {
+ _objc_fatal("Memory corruption in class list.");
+ }
+
+ bool skip_subclasses;
+
+ if (skip_metaclass && cls->isMetaClass()) {
+ skip_subclasses = true;
+ } else {
+ skip_subclasses = !code(cls);
+ }
+
+ if (!skip_subclasses && cls->data()->firstSubclass) {
+ cls = cls->data()->firstSubclass;
+ } else {
+ while (!cls->data()->nextSiblingClass && cls != top) {
+ cls = cls->superclass;
+ if (--count == 0) {
+ _objc_fatal("Memory corruption in class list.");
+ }
+ }
+ if (cls == top) break;
+ cls = cls->data()->nextSiblingClass;
+ }
+ }
+}
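+
+// For example, given `top` with subclasses A and B, where A itself has
+// subclasses A1 and A2, the visitation order is: top, A, A1, A2, B
+// (pre-order depth-first, following firstSubclass then nextSiblingClass).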
+
+// Enumerates a class and all of its realized subclasses.
+static void
+foreach_realized_class_and_subclass(Class top, bool (^code)(Class) __attribute((noescape)))
+{
+ unsigned int count = unreasonableClassCount();
+
+ foreach_realized_class_and_subclass_2(top, count, false, code);
+}
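+
+// Illustrative usage (hypothetical caller; runtimeLock held): returning `false`
+// from the block prunes that class's subclasses from the walk.
+//
+//   foreach_realized_class_and_subclass(cls, ^bool(Class c) {
+//       if (alreadyHandled(c)) return false;  // hypothetical helper; skip c's subclasses
+//       markHandled(c);                       // hypothetical helper
+//       return true;                          // descend into c's subclasses
+//   });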
+
+// Enumerates all realized classes and metaclasses.
+static void
+foreach_realized_class_and_metaclass(bool (^code)(Class) __attribute((noescape)))
+{
+ unsigned int count = unreasonableClassCount();
+
+ for (Class top = _firstRealizedClass;
+ top != nil;
+ top = top->data()->nextSiblingClass)
+ {
+ foreach_realized_class_and_subclass_2(top, count, false, code);
+ }
+}
+
+// Enumerates all realized classes (ignoring metaclasses).
+static void
+foreach_realized_class(bool (^code)(Class) __attribute((noescape)))
+{
+ unsigned int count = unreasonableClassCount();
+
+ for (Class top = _firstRealizedClass;
+ top != nil;
+ top = top->data()->nextSiblingClass)
+ {
+ foreach_realized_class_and_subclass_2(top, count, true, code);
+ }
+}
+
+
+/***********************************************************************
+ * Method Scanners / Optimization tracking
+ * Implementation of scanning for various implementations of methods.
+ **********************************************************************/
+
+namespace objc {
+
+enum SelectorBundle {
+ AWZ,
+ RR,
+ Core,
+};
+
+namespace scanner {
+
+// The current state of NSObject swizzling for every scanner.
+//
+// It allows cheap checks for global swizzles, and it also remembers
+// IMP swizzling that happens before NSObject is initialized, which
+// setInitialized() would otherwise miss.
+//
+// Every pair of bits describes a SelectorBundle.
+// even bits: is NSObject class swizzled for this bundle
+// odd bits: is NSObject meta class swizzled for this bundle
+static uintptr_t NSObjectSwizzledMask;
+
+static ALWAYS_INLINE uintptr_t
+swizzlingBit(SelectorBundle bundle, bool isMeta)
+{
+ return 1UL << (2 * bundle + isMeta);
+}
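+
+// Worked example of the layout: with AWZ == 0, RR == 1, Core == 2,
+// swizzlingBit(RR, /*isMeta*/true) == 1UL << (2*1 + 1) == 0x8. So:
+// bit 0: AWZ/class, bit 1: AWZ/meta, bit 2: RR/class, bit 3: RR/meta,
+// bit 4: Core/class, bit 5: Core/meta.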
+
+static void __attribute__((cold, noinline))
+printCustom(Class cls, SelectorBundle bundle, bool inherited)
+{
+ static char const * const SelectorBundleName[] = {
+ [AWZ] = "CUSTOM AWZ",
+ [RR] = "CUSTOM RR",
+ [Core] = "CUSTOM Core",
+ };
+
+ _objc_inform("%s: %s%s%s", SelectorBundleName[bundle],
+ cls->nameForLogging(),
+ cls->isMetaClass() ? " (meta)" : "",
+ inherited ? " (inherited)" : "");
+}
+
+enum class Scope { Instances, Classes, Both };
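+
+// Domain selects which side of a class a scanner cares about:
+// Scope::Classes scans only metaclasses (class methods such as +alloc),
+// Scope::Instances scans only the class side (instance methods such as -retain),
+// and Scope::Both scans both.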
+
+template <typename Traits, SelectorBundle Bundle, bool &ShouldPrint, Scope Domain = Scope::Both>
+class Mixin {
+
+ // Work around the compiler mishandling templates that use Class/objc_class,
+ // probably some confusion with Class being a builtin type.
+ ALWAYS_INLINE static objc_class *as_objc_class(Class cls) {
+ return (objc_class *)cls;
+ }
+
+ static void
+ setCustomRecursively(Class cls, bool inherited = false)
+ {
+ foreach_realized_class_and_subclass(cls, [=](Class c){
+ if (c != cls && !as_objc_class(c)->isInitialized()) {
+ // Subclass not yet initialized. Wait for setInitialized() to do it
+ return false;
+ }
+ if (Traits::isCustom(c)) {
+ return false;
+ }
+ Traits::setCustom(c);
+ if (ShouldPrint) {
+ printCustom(cls, Bundle, inherited || c != cls);
+ }
+ return true;
+ });
+ }
+
+ static bool
+ isNSObjectSwizzled(bool isMeta)
+ {
+ return NSObjectSwizzledMask & swizzlingBit(Bundle, isMeta);
+ }
+
+ static void
+ setNSObjectSwizzled(Class NSOClass, bool isMeta)
+ {
+ NSObjectSwizzledMask |= swizzlingBit(Bundle, isMeta);
+ if (as_objc_class(NSOClass)->isInitialized()) {
+ setCustomRecursively(NSOClass);
+ }
+ }
+
+ static void
+ scanChangedMethodForUnknownClass(const method_t *meth)
+ {
+ Class cls;
+
+ cls = classNSObject();
+ if (Domain != Scope::Classes && !isNSObjectSwizzled(NO)) {
+ for (const auto &meth2: as_objc_class(cls)->data()->methods) {
+ if (meth == &meth2) {
+ setNSObjectSwizzled(cls, NO);
+ break;
+ }
+ }
+ }
+
+ cls = metaclassNSObject();
+ if (Domain != Scope::Instances && !isNSObjectSwizzled(YES)) {
+ for (const auto &meth2: as_objc_class(cls)->data()->methods) {
+ if (meth == &meth2) {
+ setNSObjectSwizzled(cls, YES);
+ break;
+ }
+ }
+ }
+ }
+
+ static void
+ scanAddedClassImpl(Class cls, bool isMeta)
+ {
+ Class NSOClass = (isMeta ? metaclassNSObject() : classNSObject());
+ bool setCustom = NO, inherited = NO;
+
+ if (isNSObjectSwizzled(isMeta)) {
+ setCustom = YES;
+ } else if (cls == NSOClass) {
+ // NSObject is default but we need to check categories
+ auto &methods = as_objc_class(cls)->data()->methods;
+ setCustom = Traits::scanMethodLists(methods.beginCategoryMethodLists(),
+ methods.endCategoryMethodLists(cls));
+ } else if (!isMeta && !as_objc_class(cls)->superclass) {
+ // Custom Root class
+ setCustom = YES;
+ } else if (Traits::isCustom(as_objc_class(cls)->superclass)) {
+ // Superclass is custom, therefore we are too.
+ setCustom = YES;
+ inherited = YES;
+ } else {
+ // Not NSObject.
+ auto &methods = as_objc_class(cls)->data()->methods;
+ setCustom = Traits::scanMethodLists(methods.beginLists(),
+ methods.endLists());
+ }
+ if (slowpath(setCustom)) {
+ if (ShouldPrint) printCustom(cls, Bundle, inherited);
+ } else {
+ Traits::setDefault(cls);
+ }
+ }
+
+public:
+ // Scan a class that is about to be marked Initialized for particular
+ // bundles of selectors, and mark the class and its children
+ // accordingly.
+ //
+ // This also handles inheriting properties from its superclass.
+ //
+ // Caller: objc_class::setInitialized()
+ static void
+ scanInitializedClass(Class cls, Class metacls)
+ {
+ if (Domain != Scope::Classes) {
+ scanAddedClassImpl(cls, false);
+ }
+ if (Domain != Scope::Instances) {
+ scanAddedClassImpl(metacls, true);
+ }
+ }
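+
+ // Illustrative call site (a sketch; the real code lives in
+ // objc_class::setInitialized(), which is not shown here):
+ //
+ //   objc::AWZScanner::scanInitializedClass(cls, metacls);
+ //   objc::RRScanner::scanInitializedClass(cls, metacls);
+ //   objc::CoreScanner::scanInitializedClass(cls, metacls);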
+
+ // Inherit various properties from the superclass when a class
+ // is being added to the graph.
+ //
+ // Caller: addSubclass()
+ static void
+ scanAddedSubClass(Class subcls, Class supercls)
+ {
+ if (slowpath(Traits::isCustom(supercls) && !Traits::isCustom(subcls))) {
+ setCustomRecursively(subcls, true);
+ }
+ }
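+
+ // For example, if the superclass already has custom +alloc (custom AWZ),
+ // the newly added subclass inherits the custom bit here instead of being
+ // rescanned.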
+
+ // Scan method lists for selectors that would override things
+ // in a bundle.
+ //
+ // This is used to detect when categories that override problematic
+ // selectors are injected into a class after it has been initialized.
+ //
+ // Caller: prepareMethodLists()
+ static void
+ scanAddedMethodLists(Class cls, method_list_t **mlists, int count)
+ {
+ if (slowpath(Traits::isCustom(cls))) {
+ return;
+ }
+ if (slowpath(Traits::scanMethodLists(mlists, mlists + count))) {
+ setCustomRecursively(cls);
+ }
+ }
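+
+ // Illustrative trigger (the exact plumbing lives in prepareMethodLists(),
+ // not shown here): attaching a category that implements -retain to an
+ // already-initialized class is expected to reach this entry point and flip
+ // the custom RR bit for the class and its realized subclasses.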
+
+ // Handle IMP Swizzling (the IMP for an existing method being changed).
+ //
+ // In almost all cases, IMP swizzling does not affect custom bits.
+ // Custom search will already find the method whether or not
+ // it is swizzled, so it does not transition from non-custom to custom.
+ //
+ // The only cases where IMP swizzling can affect the custom bits is
+ // if the swizzled method is one of the methods that is assumed to be
+ // non-custom. These special cases are listed in setInitialized().
+ // We look for such cases here.
+ //
+ // Caller: Swizzling methods via adjustCustomFlagsForMethodChange()
+ static void
+ scanChangedMethod(Class cls, const method_t *meth)
+ {
+ if (fastpath(!Traits::isInterestingSelector(meth->name))) {
+ return;
+ }
+
+ if (cls) {
+ bool isMeta = as_objc_class(cls)->isMetaClass();
+ if (isMeta && Domain != Scope::Instances) {
+ if (cls == metaclassNSObject() && !isNSObjectSwizzled(isMeta)) {
+ setNSObjectSwizzled(cls, isMeta);
+ }
+ }
+ if (!isMeta && Domain != Scope::Classes) {
+ if (cls == classNSObject() && !isNSObjectSwizzled(isMeta)) {
+ setNSObjectSwizzled(cls, isMeta);
+ }
+ }
+ } else {
+ // We're called from method_exchangeImplementations; only the NSObject
+ // class and metaclass may be problematic (exchanging the default
+ // builtin IMP of an interesting selector is a swizzle that may flip
+ // our scanned property; for other classes, the previous value had
+ // already flipped the property).
+ //
+ // However, as we don't know the class, we need to scan all of
+ // NSObject class and metaclass methods (this is SLOW).
+ scanChangedMethodForUnknownClass(meth);
+ }
+ }
+};
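+
+// Illustrative trigger for scanChangedMethod() (a sketch; the real entry point
+// is adjustCustomFlagsForMethodChange(), which is not shown here): exchanging
+// the IMP of +alloc or -retain on NSObject itself is the case that can flip a
+// scanner's state, and it is recorded in NSObjectSwizzledMask even before
+// NSObject is initialized.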
+
+} // namespace scanner
+
+// AWZ methods: +alloc / +allocWithZone:
+struct AWZScanner : scanner::Mixin<AWZScanner, AWZ, PrintCustomAWZ, scanner::Scope::Classes> {
+ static bool isCustom(Class cls) {
+ return cls->hasCustomAWZ();
+ }
+ static void setCustom(Class cls) {
+ cls->setHasCustomAWZ();
+ }
+ static void setDefault(Class cls) {
+ cls->setHasDefaultAWZ();
+ }
+ static bool isInterestingSelector(SEL sel) {
+ return sel == @selector(alloc) || sel == @selector(allocWithZone:);
+ }
+ static bool scanMethodLists(method_list_t **mlists, method_list_t **end) {
+ SEL sels[2] = { @selector(alloc), @selector(allocWithZone:), };
+ return method_lists_contains_any(mlists, end, sels, 2);
+ }
+};
+
+// Retain/Release methods that are extremely rarely overridden
+//
+// retain/release/autorelease/retainCount/
+// _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
+struct RRScanner : scanner::Mixin<RRScanner, RR, PrintCustomRR
+#if !SUPPORT_NONPOINTER_ISA
+, scanner::Scope::Instances
+#endif
+> {
+ static bool isCustom(Class cls) {
+ return cls->hasCustomRR();
+ }
+ static void setCustom(Class cls) {
+ cls->setHasCustomRR();
+ }
+ static void setDefault(Class cls) {
+ cls->setHasDefaultRR();
+ }
+ static bool isInterestingSelector(SEL sel) {
+ return sel == @selector(retain) ||
+ sel == @selector(release) ||
+ sel == @selector(autorelease) ||
+ sel == @selector(_tryRetain) ||
+ sel == @selector(_isDeallocating) ||
+ sel == @selector(retainCount) ||
+ sel == @selector(allowsWeakReference) ||
+ sel == @selector(retainWeakReference);
+ }
+ static bool scanMethodLists(method_list_t **mlists, method_list_t **end) {
+ SEL sels[8] = {
+ @selector(retain),
+ @selector(release),
+ @selector(autorelease),
+ @selector(_tryRetain),
+ @selector(_isDeallocating),
+ @selector(retainCount),
+ @selector(allowsWeakReference),
+ @selector(retainWeakReference),
+ };
+ return method_lists_contains_any(mlists, end, sels, 8);
+ }
+};
+
+// Core NSObject methods that are extremely rarely overridden
+//
+// +new, ±class, ±self, ±isKindOfClass:, ±respondsToSelector:
+struct CoreScanner : scanner::Mixin<CoreScanner, Core, PrintCustomCore> {
+ static bool isCustom(Class cls) {
+ return cls->hasCustomCore();
+ }
+ static void setCustom(Class cls) {
+ cls->setHasCustomCore();
+ }
+ static void setDefault(Class cls) {
+ cls->setHasDefaultCore();
+ }
+ static bool isInterestingSelector(SEL sel) {
+ return sel == @selector(new) ||
+ sel == @selector(self) ||
+ sel == @selector(class) ||
+ sel == @selector(isKindOfClass:) ||
+ sel == @selector(respondsToSelector:);
+ }
+ static bool scanMethodLists(method_list_t **mlists, method_list_t **end) {
+ SEL sels[5] = {
+ @selector(new),
+ @selector(self),
+ @selector(class),
+ @selector(isKindOfClass:),
+ @selector(respondsToSelector:)
+ };
+ return method_lists_contains_any(mlists, end, sels, 5);
+ }
+};
+
+class category_list : nocopy_t {
+ union {
+ locstamped_category_t lc;
+ struct {
+ locstamped_category_t *array;
+ // this aliases with locstamped_category_t::hi
+ // which is an aliased pointer
+ uint32_t is_array : 1;
+ uint32_t count : 31;
+ uint32_t size : 32;
+ };
+ } _u;
+
+public:
+ category_list() : _u{{nullptr, nullptr}} { }
+ category_list(locstamped_category_t lc) : _u{{lc}} { }
+ category_list(category_list &&other) : category_list() {
+ std::swap(_u, other._u);
+ }
+ ~category_list()
+ {
+ if (_u.is_array) {
+ free(_u.array);
+ }
+ }
+
+ uint32_t count() const
+ {
+ if (_u.is_array) return _u.count;
+ return _u.lc.cat ? 1 : 0;
+ }
+
+ uint32_t arrayByteSize(uint32_t size) const
+ {
+ return sizeof(locstamped_category_t) * size;
+ }
+
+ const locstamped_category_t *array() const
+ {
+ return _u.is_array ? _u.array : &_u.lc;
+ }
+
+ void append(locstamped_category_t lc)
+ {
+ if (_u.is_array) {
+ if (_u.count == _u.size) {
+ // Have a typical malloc growth:
+ // - size < 8: grow by 2
+ // - size < 16: grow by 4
+ // - size < 32: grow by 8
+ // ... etc
+ _u.size += _u.size < 8 ? 2 : 1 << (fls(_u.size) - 2);
+ _u.array = (locstamped_category_t *)reallocf(_u.array, arrayByteSize(_u.size));
+ }
+ _u.array[_u.count++] = lc;
+ } else if (_u.lc.cat == NULL) {
+ _u.lc = lc;
+ } else {
+ locstamped_category_t *arr = (locstamped_category_t *)malloc(arrayByteSize(2));
+ arr[0] = _u.lc;
+ arr[1] = lc;
+
+ _u.array = arr;
+ _u.is_array = true;
+ _u.count = 2;
+ _u.size = 2;
+ }
+ }
+
+ void erase(category_t *cat)
+ {
+ if (_u.is_array) {
+ for (int i = 0; i < _u.count; i++) {
+ if (_u.array[i].cat == cat) {
+ // shift the remaining entries down to preserve list order, then drop
+ // the now-stale last slot from the count
+ memmove(&_u.array[i], &_u.array[i+1], arrayByteSize(_u.count - i - 1));
+ _u.count--;
+ return;
+ }
+ }
+ } else if (_u.lc.cat == cat) {
+ _u.lc.cat = NULL;
+ _u.lc.hi = NULL;
+ }
+ }
+};
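+
+// Illustrative usage (for exposition only; lc1 and lc2 are hypothetical
+// locstamped_category_t values): the first entry is stored inline, and the
+// second append() spills to a malloc'd array and sets is_array.
+//
+//   category_list list;
+//   list.append(lc1);   // stored inline in _u.lc
+//   list.append(lc2);   // switches to a heap array of two entries
+//   // list.count() == 2, list.array() now points at the heap array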
+
+class UnattachedCategories : public ExplicitInitDenseMap<Class, category_list>
+{
+public:
+ void addForClass(locstamped_category_t lc, Class cls)
+ {
+ runtimeLock.assertLocked();
+
+ if (slowpath(PrintConnecting)) {
+ _objc_inform("CLASS: found category %c%s(%s)",
+ cls->isMetaClass() ? '+' : '-',
+ cls->nameForLogging(), lc.cat->name);
+ }
+
+ auto result = get().try_emplace(cls, lc);
+ if (!result.second) {
+ result.first->second.append(lc);
+ }
+ }
+
+ void attachToClass(Class cls, Class previously, int flags)
+ {
+ runtimeLock.assertLocked();
+ ASSERT((flags & ATTACH_CLASS) ||
+ (flags & ATTACH_METACLASS) ||
+ (flags & ATTACH_CLASS_AND_METACLASS));
+
+ auto &map = get();
+ auto it = map.find(previously);
+
+ if (it != map.end()) {
+ category_list &list = it->second;
+ if (flags & ATTACH_CLASS_AND_METACLASS) {
+ int otherFlags = flags & ~ATTACH_CLASS_AND_METACLASS;
+ attachCategories(cls, list.array(), list.count(), otherFlags | ATTACH_CLASS);
+ attachCategories(cls->ISA(), list.array(), list.count(), otherFlags | ATTACH_METACLASS);
+ } else {
+ attachCategories(cls, list.array(), list.count(), flags);
+ }
+ map.erase(it);
+ }
+ }
+
+ void eraseCategoryForClass(category_t *cat, Class cls)
+ {
+ runtimeLock.assertLocked();
+
+ auto &map = get();
+ auto it = map.find(cls);
+ if (it != map.end()) {
+ category_list &list = it->second;
+ list.erase(cat);
+ if (list.count() == 0) {
+ map.erase(it);
+ }
+ }
+ }
+
+ void eraseClass(Class cls)
+ {
+ runtimeLock.assertLocked();
+
+ get().erase(cls);
+ }
+};
+
+static UnattachedCategories unattachedCategories;
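+
+// Illustrative flow (a sketch; the exact call sites are elsewhere in the
+// runtime): a category loaded before its class is realized is recorded with
+// unattachedCategories.addForClass(); when the class (or its metaclass) is
+// later realized, unattachedCategories.attachToClass() attaches the pending
+// categories and removes the map entry; eraseCategoryForClass() and
+// eraseClass() clean up when a category or class goes away.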
+
+} // namespace objc
+