#ifndef _OBJC_RUNTIME_NEW_H
#define _OBJC_RUNTIME_NEW_H
+#include "PointerUnion.h"
+
// class_data_bits_t is the class_t->data field (class_rw_t pointer plus flags)
// The extra bits are optimized for the retain/release and alloc/dealloc paths.
// class has started realizing but not yet completed it
#define RW_REALIZING (1<<19)
+// class is a metaclass (copied from ro)
+#define RW_META RO_META // (1<<0)
+
+
// NOTE: MORE RW_ FLAGS DEFINED BELOW
protected:
class iterator {
- List **lists;
- List **listsEnd;
+ List * const *lists;
+ List * const *listsEnd;
typename List::iterator m, mEnd;
public:
- iterator(List **begin, List **end)
+ iterator(List *const *begin, List *const *end)
: lists(begin), listsEnd(end)
{
if (begin != end) {
return arrayAndFlag & 1;
}
- array_t *array() {
+ array_t *array() const {
return (array_t *)(arrayAndFlag & ~1);
}
}
public:
+ list_array_tt() : list(nullptr) { }
+ list_array_tt(List *l) : list(l) { }
- uint32_t count() {
+ uint32_t count() const {
uint32_t result = 0;
for (auto lists = beginLists(), end = endLists();
lists != end;
return result;
}
- iterator begin() {
+ iterator begin() const {
return iterator(beginLists(), endLists());
}
- iterator end() {
- List **e = endLists();
+ iterator end() const {
+ List * const *e = endLists();
return iterator(e, e);
}
}
}
- List** beginLists() {
+ List* const * beginLists() const {
if (hasArray()) {
return array()->lists;
} else {
}
}
- List** endLists() {
+ List* const * endLists() const {
if (hasArray()) {
return array()->lists + array()->count;
} else if (list) {
typedef list_array_tt<method_t, method_list_t> Super;
public:
- method_list_t **beginCategoryMethodLists() {
+ method_array_t() : Super() { }
+ method_array_t(method_list_t *l) : Super(l) { }
+
+ method_list_t * const *beginCategoryMethodLists() const {
return beginLists();
}
- method_list_t **endCategoryMethodLists(Class cls);
+ method_list_t * const *endCategoryMethodLists(Class cls) const;
method_array_t duplicate() {
return Super::duplicate<method_array_t>();
typedef list_array_tt<property_t, property_list_t> Super;
public:
+ property_array_t() : Super() { }
+ property_array_t(property_list_t *l) : Super(l) { }
+
property_array_t duplicate() {
return Super::duplicate<property_array_t>();
}
typedef list_array_tt<protocol_ref_t, protocol_list_t> Super;
public:
+ protocol_array_t() : Super() { }
+ protocol_array_t(protocol_list_t *l) : Super(l) { }
+
protocol_array_t duplicate() {
return Super::duplicate<protocol_array_t>();
}
};
+// class_rw_ext_t: lazily-allocated extension of class_rw_t.
+// Holds the mutable method/property/protocol arrays and other rarely
+// needed fields, so classes that are never modified at runtime store
+// only a pointer to their class_ro_t in class_rw_t::ro_or_rw_ext
+// (see class_rw_t::extAllocIfNeeded / extAlloc).
+struct class_rw_ext_t {
+    const class_ro_t *ro;           // the class's read-only data; initialized before publication (see set_ro_or_rwe)
+    method_array_t methods;         // method lists, including lists attached from categories
+    property_array_t properties;    // property lists, including lists attached from categories
+    protocol_array_t protocols;     // protocol lists, including lists attached from categories
+    char *demangledName;            // cached demangled name (set lazily by objc_class::demangledName)
+    uint32_t version;               // class version: 7 for metaclasses, 0 for classes (see extAlloc)
+};
struct class_rw_t {
    // Be warned that Symbolication knows the layout of this structure.
    uint32_t flags;
-    uint16_t version;
    uint16_t witness;
+#if SUPPORT_INDEXED_ISA
+    uint16_t index;
+#endif
-    const class_ro_t *ro;
-
-    method_array_t methods;
-    property_array_t properties;
-    protocol_array_t protocols;
+    // Holds either a const class_ro_t * (class has no runtime-attached
+    // data yet) or a class_rw_ext_t * (see extAlloc), encoded as an
+    // objc::PointerUnion. Atomic so lockless readers can race with the
+    // release-store publication in set_ro_or_rwe below.
+    explicit_atomic<uintptr_t> ro_or_rw_ext;
    Class firstSubclass;
    Class nextSiblingClass;
-    char *demangledName;
+private:
+    using ro_or_rw_ext_t = objc::PointerUnion<const class_ro_t *, class_rw_ext_t *>;
-#if SUPPORT_INDEXED_ISA
-    uint32_t index;
-#endif
+    // Read the discriminated ro/rwe pointer.
+    const ro_or_rw_ext_t get_ro_or_rwe() const {
+        return ro_or_rw_ext_t{ro_or_rw_ext};
+    }
+
+    // Store a bare class_ro_t * (no extension allocated).
+    void set_ro_or_rwe(const class_ro_t *ro) {
+        ro_or_rw_ext_t{ro}.storeAt(ro_or_rw_ext, memory_order_relaxed);
+    }
+
+    // Publish a freshly built class_rw_ext_t.
+    void set_ro_or_rwe(class_rw_ext_t *rwe, const class_ro_t *ro) {
+        // the release barrier is so that the class_rw_ext_t::ro initialization
+        // is visible to lockless readers
+        rwe->ro = ro;
+        ro_or_rw_ext_t{rwe}.storeAt(ro_or_rw_ext, memory_order_release);
+    }
-    void setFlags(uint32_t set)
+    // Allocate and publish this class's class_rw_ext_t (defined in the
+    // .mm file). Requires runtimeLock. deep duplicates the base method list.
+    class_rw_ext_t *extAlloc(const class_ro_t *ro, bool deep = false);
+
+public:
+    void setFlags(uint32_t set)
    {
        __c11_atomic_fetch_or((_Atomic(uint32_t) *)&flags, set, __ATOMIC_RELAXED);
    }
            newf = (oldf | set) & ~clear;
        } while (!OSAtomicCompareAndSwap32Barrier(oldf, newf, (volatile int32_t *)&flags));
    }
+
+    // The extension record, or nil if none has been allocated yet.
+    class_rw_ext_t *ext() const {
+        return get_ro_or_rwe().dyn_cast<class_rw_ext_t *>();
+    }
+
+    // The extension record, allocating it on first use.
+    // extAlloc requires runtimeLock; callers must hold it on the slow path.
+    class_rw_ext_t *extAllocIfNeeded() {
+        auto v = get_ro_or_rwe();
+        if (fastpath(v.is<class_rw_ext_t *>())) {
+            return v.get<class_rw_ext_t *>();
+        } else {
+            return extAlloc(v.get<const class_ro_t *>());
+        }
+    }
+
+    // Force an extension whose base method list is a duplicate of ro's
+    // (used when duplicating a class).
+    class_rw_ext_t *deepCopy(const class_ro_t *ro) {
+        return extAlloc(ro, true);
+    }
+
+    // The class's read-only data, whether or not an extension exists.
+    const class_ro_t *ro() const {
+        auto v = get_ro_or_rwe();
+        if (slowpath(v.is<class_rw_ext_t *>())) {
+            return v.get<class_rw_ext_t *>()->ro;
+        }
+        return v.get<const class_ro_t *>();
+    }
+
+    void set_ro(const class_ro_t *ro) {
+        auto v = get_ro_or_rwe();
+        if (v.is<class_rw_ext_t *>()) {
+            v.get<class_rw_ext_t *>()->ro = ro;
+        } else {
+            set_ro_or_rwe(ro);
+        }
+    }
+
+    // Note: returns by value. Without an extension this synthesizes a
+    // one-element array view over ro's base list.
+    const method_array_t methods() const {
+        auto v = get_ro_or_rwe();
+        if (v.is<class_rw_ext_t *>()) {
+            return v.get<class_rw_ext_t *>()->methods;
+        } else {
+            return method_array_t{v.get<const class_ro_t *>()->baseMethods()};
+        }
+    }
+
+    const property_array_t properties() const {
+        auto v = get_ro_or_rwe();
+        if (v.is<class_rw_ext_t *>()) {
+            return v.get<class_rw_ext_t *>()->properties;
+        } else {
+            return property_array_t{v.get<const class_ro_t *>()->baseProperties};
+        }
+    }
+
+    const protocol_array_t protocols() const {
+        auto v = get_ro_or_rwe();
+        if (v.is<class_rw_ext_t *>()) {
+            return v.get<class_rw_ext_t *>()->protocols;
+        } else {
+            return protocol_array_t{v.get<const class_ro_t *>()->baseProtocols};
+        }
+    }
};
class_rw_t *maybe_rw = data();
if (maybe_rw->flags & RW_REALIZED) {
// maybe_rw is rw
- return maybe_rw->ro;
+ return maybe_rw->ro();
} else {
// maybe_rw is actually ro
return (class_ro_t *)maybe_rw;
// Return YES if the class's ivars are managed by ARC,
// or the class is MRC but has ARC-style weak ivars.
bool hasAutomaticIvars() {
- return data()->ro->flags & (RO_IS_ARC | RO_HAS_WEAK_WITHOUT_ARC);
+ return data()->ro()->flags & (RO_IS_ARC | RO_HAS_WEAK_WITHOUT_ARC);
}
// Return YES if the class's ivars are managed by ARC.
bool isARC() {
- return data()->ro->flags & RO_IS_ARC;
+ return data()->ro()->flags & RO_IS_ARC;
}
#if FAST_CACHE_META
return cache.getBit(FAST_CACHE_META);
#else
- return data()->ro->flags & RO_META;
+ return data()->flags & RW_META;
#endif
}
// Like isMetaClass, but also valid on un-realized classes
bool isMetaClassMaybeUnrealized() {
- return bits.safe_ro()->flags & RO_META;
+ static_assert(offsetof(class_rw_t, flags) == offsetof(class_ro_t, flags), "flags alias");
+ static_assert(RO_META == RW_META, "flags alias");
+ return data()->flags & RW_META;
}
// NOT identical to this->ISA when this is a metaclass
ASSERT(this);
if (isRealized() || isFuture()) {
- return data()->ro->name;
+ return data()->ro()->name;
} else {
return ((const class_ro_t *)data())->name;
}
}
- const char *demangledName();
+ const char *demangledName(bool needsLock);
const char *nameForLogging();
// May be unaligned depending on class's ivars.
uint32_t unalignedInstanceStart() const {
ASSERT(isRealized());
- return data()->ro->instanceStart;
+ return data()->ro()->instanceStart;
}
// Class's instance start rounded up to a pointer-size boundary.
// May be unaligned depending on class's ivars.
uint32_t unalignedInstanceSize() const {
ASSERT(isRealized());
- return data()->ro->instanceSize;
+ return data()->ro()->instanceSize;
}
// Class's ivar size rounded up to a pointer-size boundary.
void setInstanceSize(uint32_t newSize) {
ASSERT(isRealized());
ASSERT(data()->flags & RW_REALIZING);
- if (newSize != data()->ro->instanceSize) {
+ auto ro = data()->ro();
+ if (newSize != ro->instanceSize) {
ASSERT(data()->flags & RW_COPIED_RO);
- *const_cast<uint32_t *>(&data()->ro->instanceSize) = newSize;
+ *const_cast<uint32_t *>(&ro->instanceSize) = newSize;
}
cache.setFastInstanceSize(newSize);
}
#include "objc-runtime-new.h"
#include "objc-file.h"
#include "objc-cache.h"
+#include "objc-zalloc.h"
#include <Block.h>
#include <objc/message.h>
#include <mach/shared_region.h>
static IMP addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace);
static void adjustCustomFlagsForMethodChange(Class cls, method_t *meth);
static method_t *search_method_list(const method_list_t *mlist, SEL sel);
-static bool method_lists_contains_any(method_list_t **mlists, method_list_t **end,
+static bool method_lists_contains_any(method_list_t * const *mlists, method_list_t * const *end,
SEL sels[], size_t selcount);
static void flushCaches(Class cls);
static void initializeTaggedPointerObfuscator(void);
**********************************************************************/
static Class _firstRealizedClass = nil;
+/***********************************************************************
+* didInitialAttachCategories
+* Whether the initial attachment of categories present at startup has
+* been done.
+**********************************************************************/
+static bool didInitialAttachCategories = false;
+
+/***********************************************************************
+* didCallDyldNotifyRegister
+* Whether the call to _dyld_objc_notify_register has completed.
+**********************************************************************/
+bool didCallDyldNotifyRegister = false;
+
/*
Low two bits of mlist->entsize is used as the fixed-up marker.
PREOPTIMIZED VERSION:
}
-method_list_t **method_array_t::endCategoryMethodLists(Class cls)
+method_list_t * const *method_array_t::endCategoryMethodLists(Class cls) const
{
- method_list_t **mlists = beginLists();
- method_list_t **mlistsEnd = endLists();
+ auto mlists = beginLists();
+ auto mlistsEnd = endLists();
- if (mlists == mlistsEnd || !cls->data()->ro->baseMethods())
+ if (mlists == mlistsEnd || !cls->data()->ro()->baseMethods())
{
// No methods, or no base methods.
// Everything here is a category method.
if (rw->flags & RW_COPIED_RO) {
// already writeable, do nothing
} else {
- rw->ro = rw->ro->duplicate();
+ rw->set_ro(rw->ro()->duplicate());
rw->flags |= RW_COPIED_RO;
}
- return (class_ro_t *)rw->ro;
+ return const_cast<class_ro_t *>(rw->ro());
}
}
// Look for method in cls
- for (const auto& meth2 : cls->data()->methods) {
+ for (const auto& meth2 : cls->data()->methods()) {
SEL s2 = sel_registerName(sel_cname(meth2.name));
if (s == s2) {
logReplacedMethod(cls->nameForLogging(), s,
cls = classNSObject();
if (Domain != Scope::Classes && !isNSObjectSwizzled(NO)) {
- for (const auto &meth2: as_objc_class(cls)->data()->methods) {
+ for (const auto &meth2: as_objc_class(cls)->data()->methods()) {
if (meth == &meth2) {
setNSObjectSwizzled(cls, NO);
break;
cls = metaclassNSObject();
if (Domain != Scope::Instances && !isNSObjectSwizzled(YES)) {
- for (const auto &meth2: as_objc_class(cls)->data()->methods) {
+ for (const auto &meth2: as_objc_class(cls)->data()->methods()) {
if (meth == &meth2) {
setNSObjectSwizzled(cls, YES);
break;
setCustom = YES;
} else if (cls == NSOClass) {
// NSObject is default but we need to check categories
- auto &methods = as_objc_class(cls)->data()->methods;
+ auto &methods = as_objc_class(cls)->data()->methods();
setCustom = Traits::scanMethodLists(methods.beginCategoryMethodLists(),
methods.endCategoryMethodLists(cls));
} else if (!isMeta && !as_objc_class(cls)->superclass) {
inherited = YES;
} else {
// Not NSObject.
- auto &methods = as_objc_class(cls)->data()->methods;
+ auto &methods = as_objc_class(cls)->data()->methods();
setCustom = Traits::scanMethodLists(methods.beginLists(),
methods.endLists());
}
static bool isInterestingSelector(SEL sel) {
return sel == @selector(alloc) || sel == @selector(allocWithZone:);
}
- static bool scanMethodLists(method_list_t **mlists, method_list_t **end) {
+ static bool scanMethodLists(method_list_t * const *mlists, method_list_t * const *end) {
SEL sels[2] = { @selector(alloc), @selector(allocWithZone:), };
return method_lists_contains_any(mlists, end, sels, 2);
}
sel == @selector(allowsWeakReference) ||
sel == @selector(retainWeakReference);
}
- static bool scanMethodLists(method_list_t **mlists, method_list_t **end) {
+ static bool scanMethodLists(method_list_t * const *mlists, method_list_t * const *end) {
SEL sels[8] = {
@selector(retain),
@selector(release),
sel == @selector(isKindOfClass:) ||
sel == @selector(respondsToSelector:);
}
- static bool scanMethodLists(method_list_t **mlists, method_list_t **end) {
+ static bool scanMethodLists(method_list_t * const *mlists, method_list_t * const *end) {
SEL sels[5] = {
@selector(new),
@selector(self),
static bool isBundleClass(Class cls)
{
- return cls->data()->ro->flags & RO_FROM_BUNDLE;
+ return cls->data()->ro()->flags & RO_FROM_BUNDLE;
}
}
}
+/***********************************************************************
+* class_rw_t::extAlloc
+* Allocate this class's class_rw_ext_t and seed it with the base
+* method/property/protocol lists from the given ro. If deepCopy is set,
+* the base method list is duplicated before being attached (used by
+* class_rw_t::deepCopy when duplicating a class).
+* Locking: runtimeLock must be held by the caller.
+**********************************************************************/
+class_rw_ext_t *
+class_rw_t::extAlloc(const class_ro_t *ro, bool deepCopy)
+{
+    runtimeLock.assertLocked();
+
+    auto rwe = objc::zalloc<class_rw_ext_t>();
+
+    // Class version: metaclasses are 7, classes 0 (old runtime went up to 6).
+    rwe->version = (ro->flags & RO_META) ? 7 : 0;
+
+    method_list_t *list = ro->baseMethods();
+    if (list) {
+        if (deepCopy) list = list->duplicate();
+        rwe->methods.attachLists(&list, 1);
+    }
+
+    // See comments in objc_duplicateClass
+    // property lists and protocol lists historically
+    // have not been deep-copied
+    //
+    // This is probably wrong and ought to be fixed some day
+    property_list_t *proplist = ro->baseProperties;
+    if (proplist) {
+        rwe->properties.attachLists(&proplist, 1);
+    }
+
+    protocol_list_t *protolist = ro->baseProtocols;
+    if (protolist) {
+        rwe->protocols.attachLists(&protolist, 1);
+    }
+
+    // Publish last, with a release store, so lockless readers never see
+    // a partially initialized rwe (see set_ro_or_rwe).
+    set_ro_or_rwe(rwe, ro);
+    return rwe;
+}
// Attach method lists and properties and protocols from categories to a class.
// Assumes the categories in cats are all loaded and sorted by load order,
uint32_t protocount = 0;
bool fromBundle = NO;
bool isMeta = (flags & ATTACH_METACLASS);
- auto rw = cls->data();
+ auto rwe = cls->data()->extAllocIfNeeded();
for (uint32_t i = 0; i < cats_count; i++) {
auto& entry = cats_list[i];
if (mlist) {
if (mcount == ATTACH_BUFSIZ) {
prepareMethodLists(cls, mlists, mcount, NO, fromBundle);
- rw->methods.attachLists(mlists, mcount);
+ rwe->methods.attachLists(mlists, mcount);
mcount = 0;
}
mlists[ATTACH_BUFSIZ - ++mcount] = mlist;
entry.cat->propertiesForMeta(isMeta, entry.hi);
if (proplist) {
if (propcount == ATTACH_BUFSIZ) {
- rw->properties.attachLists(proplists, propcount);
+ rwe->properties.attachLists(proplists, propcount);
propcount = 0;
}
proplists[ATTACH_BUFSIZ - ++propcount] = proplist;
protocol_list_t *protolist = entry.cat->protocolsForMeta(isMeta);
if (protolist) {
if (protocount == ATTACH_BUFSIZ) {
- rw->protocols.attachLists(protolists, protocount);
+ rwe->protocols.attachLists(protolists, protocount);
protocount = 0;
}
protolists[ATTACH_BUFSIZ - ++protocount] = protolist;
if (mcount > 0) {
prepareMethodLists(cls, mlists + ATTACH_BUFSIZ - mcount, mcount, NO, fromBundle);
- rw->methods.attachLists(mlists + ATTACH_BUFSIZ - mcount, mcount);
+ rwe->methods.attachLists(mlists + ATTACH_BUFSIZ - mcount, mcount);
if (flags & ATTACH_EXISTING) flushCaches(cls);
}
- rw->properties.attachLists(proplists + ATTACH_BUFSIZ - propcount, propcount);
+ rwe->properties.attachLists(proplists + ATTACH_BUFSIZ - propcount, propcount);
- rw->protocols.attachLists(protolists + ATTACH_BUFSIZ - protocount, protocount);
+ rwe->protocols.attachLists(protolists + ATTACH_BUFSIZ - protocount, protocount);
}
bool isMeta = cls->isMetaClass();
auto rw = cls->data();
- auto ro = rw->ro;
+ auto ro = rw->ro();
+ auto rwe = rw->ext();
// Methodizing for the first time
if (PrintConnecting) {
method_list_t *list = ro->baseMethods();
if (list) {
prepareMethodLists(cls, &list, 1, YES, isBundleClass(cls));
- rw->methods.attachLists(&list, 1);
+ if (rwe) rwe->methods.attachLists(&list, 1);
}
property_list_t *proplist = ro->baseProperties;
- if (proplist) {
- rw->properties.attachLists(&proplist, 1);
+ if (rwe && proplist) {
+ rwe->properties.attachLists(&proplist, 1);
}
protocol_list_t *protolist = ro->baseProtocols;
- if (protolist) {
- rw->protocols.attachLists(&protolist, 1);
+ if (rwe && protolist) {
+ rwe->protocols.attachLists(&protolist, 1);
}
// Root classes get bonus method implementations if they don't have
#if DEBUG
// Debug: sanity-check all SELs; log method list contents
- for (const auto& meth : rw->methods) {
+ for (const auto& meth : rw->methods()) {
if (PrintConnecting) {
_objc_inform("METHOD %c[%s %s]", isMeta ? '+' : '-',
cls->nameForLogging(), sel_getName(meth.name));
_objc_inform("FUTURE: reserving %p for %s", (void*)cls, name);
}
- class_rw_t *rw = (class_rw_t *)calloc(sizeof(class_rw_t), 1);
+ class_rw_t *rw = objc::zalloc<class_rw_t>();
class_ro_t *ro = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
ro->name = strdupIfMutable(name);
- rw->ro = ro;
+ rw->set_ro(ro);
cls->setData(rw);
cls->data()->flags = RO_FUTURE;
*/
// Non-fragile ivars - reconcile this class with its superclass
- const class_ro_t *super_ro = supercls->data()->ro;
+ const class_ro_t *super_ro = supercls->data()->ro();
if (DebugNonFragileIvars) {
// Debugging: Force non-fragile ivars to slide.
{
uint32_t oldStart = ro->instanceStart;
class_ro_t *ro_w = make_ro_writeable(rw);
- ro = rw->ro;
+ ro = rw->ro();
// Find max ivar alignment in class.
// default to word size to simplify ivar update
super_ro->instanceSize);
}
class_ro_t *ro_w = make_ro_writeable(rw);
- ro = rw->ro;
+ ro = rw->ro();
moveIvars(ro_w, super_ro->instanceSize);
gdb_objc_class_changed(cls, OBJC_CLASS_IVARS_CHANGED, ro->name);
}
{
runtimeLock.assertLocked();
- const class_ro_t *ro;
class_rw_t *rw;
Class supercls;
Class metacls;
- bool isMeta;
if (!cls) return nil;
if (cls->isRealized()) return cls;
// fixme verify class is not in an un-dlopened part of the shared cache?
- ro = (const class_ro_t *)cls->data();
+ auto ro = (const class_ro_t *)cls->data();
+ auto isMeta = ro->flags & RO_META;
if (ro->flags & RO_FUTURE) {
// This was a future class. rw data is already allocated.
rw = cls->data();
- ro = cls->data()->ro;
+ ro = cls->data()->ro();
+ ASSERT(!isMeta);
cls->changeInfo(RW_REALIZED|RW_REALIZING, RW_FUTURE);
} else {
// Normal class. Allocate writeable class data.
- rw = (class_rw_t *)calloc(sizeof(class_rw_t), 1);
- rw->ro = ro;
- rw->flags = RW_REALIZED|RW_REALIZING;
+ rw = objc::zalloc<class_rw_t>();
+ rw->set_ro(ro);
+ rw->flags = RW_REALIZED|RW_REALIZING|isMeta;
cls->setData(rw);
}
- isMeta = ro->flags & RO_META;
#if FAST_CACHE_META
if (isMeta) cls->cache.setBit(FAST_CACHE_META);
#endif
- rw->version = isMeta ? 7 : 0; // old runtime went up to 6
-
// Choose an index for this class.
// Sets cls->instancesRequireRawIsa if indexes no more indexes are available
}
+/***********************************************************************
+* load_categories_nolock
+* Discover and register the categories defined in one image.
+* Realized classes get categories attached immediately; unrealized and
+* stub classes have them recorded in objc::unattachedCategories for
+* methodizeClass() to pick up later.
+* Locking: runtimeLock must be held by the caller.
+**********************************************************************/
+static void load_categories_nolock(header_info *hi) {
+    bool hasClassProperties = hi->info()->hasCategoryClassProperties();
+
+    size_t count;
+    auto processCatlist = [&](category_t * const *catlist) {
+        for (unsigned i = 0; i < count; i++) {
+            category_t *cat = catlist[i];
+            Class cls = remapClass(cat->cls);
+            locstamped_category_t lc{cat, hi};
+
+            if (!cls) {
+                // Category's target class is missing (probably weak-linked).
+                // Ignore the category.
+                if (PrintConnecting) {
+                    _objc_inform("CLASS: IGNORING category \?\?\?(%s) %p with "
+                                 "missing weak-linked target class",
+                                 cat->name, cat);
+                }
+                continue;
+            }
+
+            // Process this category.
+            if (cls->isStubClass()) {
+                // Stub classes are never realized. Stub classes
+                // don't know their metaclass until they're
+                // initialized, so we have to add categories with
+                // class methods or properties to the stub itself.
+                // methodizeClass() will find them and add them to
+                // the metaclass as appropriate.
+                // NOTE(review): cat->protocols is tested twice in this
+                // condition; redundant but harmless under ||.
+                if (cat->instanceMethods ||
+                    cat->protocols ||
+                    cat->instanceProperties ||
+                    cat->classMethods ||
+                    cat->protocols ||
+                    (hasClassProperties && cat->_classProperties))
+                {
+                    objc::unattachedCategories.addForClass(lc, cls);
+                }
+            } else {
+                // First, register the category with its target class.
+                // Then, rebuild the class's method lists (etc) if
+                // the class is realized.
+                if (cat->instanceMethods || cat->protocols
+                    || cat->instanceProperties)
+                {
+                    if (cls->isRealized()) {
+                        attachCategories(cls, &lc, 1, ATTACH_EXISTING);
+                    } else {
+                        objc::unattachedCategories.addForClass(lc, cls);
+                    }
+                }
+
+                // Class methods and class properties belong on the metaclass.
+                if (cat->classMethods || cat->protocols
+                    || (hasClassProperties && cat->_classProperties))
+                {
+                    if (cls->ISA()->isRealized()) {
+                        attachCategories(cls->ISA(), &lc, 1, ATTACH_EXISTING | ATTACH_METACLASS);
+                    } else {
+                        objc::unattachedCategories.addForClass(lc, cls->ISA());
+                    }
+                }
+            }
+        }
+    };
+
+    // Both the ordinary and the "2" category list sections are processed.
+    processCatlist(_getObjc2CategoryList(hi, &count));
+    processCatlist(_getObjc2CategoryList2(hi, &count));
+}
+
+/***********************************************************************
+* loadAllCategories
+* Attach the categories of every header loaded so far. Called once from
+* load_images, after _dyld_objc_notify_register has completed
+* (see didInitialAttachCategories / didCallDyldNotifyRegister).
+**********************************************************************/
+static void loadAllCategories() {
+    mutex_locker_t lock(runtimeLock);
+
+    for (auto *hi = FirstHeader; hi != NULL; hi = hi->getNext()) {
+        load_categories_nolock(hi);
+    }
+}
+
/***********************************************************************
* load_images
* Process +load in the given images which are being mapped in by dyld.
void
load_images(const char *path __unused, const struct mach_header *mh)
{
+ if (!didInitialAttachCategories && didCallDyldNotifyRegister) {
+ didInitialAttachCategories = true;
+ loadAllCategories();
+ }
+
// Return without taking locks if there are no +load methods here.
if (!hasLoadMethods((const headerType *)mh)) return;
}
class_rw_t *rw = newCls->data();
- const class_ro_t *old_ro = rw->ro;
+ const class_ro_t *old_ro = rw->ro();
memcpy(newCls, cls, sizeof(objc_class));
- rw->ro = (class_ro_t *)newCls->data();
+ rw->set_ro((class_ro_t *)newCls->data());
newCls->setData(rw);
freeIfMutable((char *)old_ro->name);
free((void *)old_ro);
ts.log("IMAGE TIMES: fix up @protocol references");
- // Discover categories.
- for (EACH_HEADER) {
- bool hasClassProperties = hi->info()->hasCategoryClassProperties();
-
- auto processCatlist = [&](category_t * const *catlist) {
- for (i = 0; i < count; i++) {
- category_t *cat = catlist[i];
- Class cls = remapClass(cat->cls);
- locstamped_category_t lc{cat, hi};
-
- if (!cls) {
- // Category's target class is missing (probably weak-linked).
- // Ignore the category.
- if (PrintConnecting) {
- _objc_inform("CLASS: IGNORING category \?\?\?(%s) %p with "
- "missing weak-linked target class",
- cat->name, cat);
- }
- continue;
- }
-
- // Process this category.
- if (cls->isStubClass()) {
- // Stub classes are never realized. Stub classes
- // don't know their metaclass until they're
- // initialized, so we have to add categories with
- // class methods or properties to the stub itself.
- // methodizeClass() will find them and add them to
- // the metaclass as appropriate.
- if (cat->instanceMethods ||
- cat->protocols ||
- cat->instanceProperties ||
- cat->classMethods ||
- cat->protocols ||
- (hasClassProperties && cat->_classProperties))
- {
- objc::unattachedCategories.addForClass(lc, cls);
- }
- } else {
- // First, register the category with its target class.
- // Then, rebuild the class's method lists (etc) if
- // the class is realized.
- if (cat->instanceMethods || cat->protocols
- || cat->instanceProperties)
- {
- if (cls->isRealized()) {
- attachCategories(cls, &lc, 1, ATTACH_EXISTING);
- } else {
- objc::unattachedCategories.addForClass(lc, cls);
- }
- }
-
- if (cat->classMethods || cat->protocols
- || (hasClassProperties && cat->_classProperties))
- {
- if (cls->ISA()->isRealized()) {
- attachCategories(cls->ISA(), &lc, 1, ATTACH_EXISTING | ATTACH_METACLASS);
- } else {
- objc::unattachedCategories.addForClass(lc, cls->ISA());
- }
- }
- }
- }
- };
- processCatlist(_getObjc2CategoryList(hi, &count));
- processCatlist(_getObjc2CategoryList2(hi, &count));
+ // Discover categories. Only do this after the initial category
+ // attachment has been done. For categories present at startup,
+ // discovery is deferred until the first load_images call after
+ // the call to _dyld_objc_notify_register completes. rdar://problem/53119145
+ if (didInitialAttachCategories) {
+ for (EACH_HEADER) {
+ load_categories_nolock(hi);
+ }
}
ts.log("IMAGE TIMES: discover categories");
}
mutex_locker_t lock(runtimeLock);
+ const auto methods = cls->data()->methods();
ASSERT(cls->isRealized());
- count = cls->data()->methods.count();
+ count = methods.count();
if (count > 0) {
result = (Method *)malloc((count + 1) * sizeof(Method));
count = 0;
- for (auto& meth : cls->data()->methods) {
+ for (auto& meth : methods) {
result[count++] = &meth;
}
result[count] = nil;
ASSERT(cls->isRealized());
- if ((ivars = cls->data()->ro->ivars) && ivars->count) {
+ if ((ivars = cls->data()->ro()->ivars) && ivars->count) {
result = (Ivar *)malloc((ivars->count+1) * sizeof(Ivar));
for (auto& ivar : *ivars) {
auto rw = cls->data();
property_t **result = nil;
- unsigned int count = rw->properties.count();
+ auto const properties = rw->properties();
+ unsigned int count = properties.count();
if (count > 0) {
result = (property_t **)malloc((count + 1) * sizeof(property_t *));
count = 0;
- for (auto& prop : rw->properties) {
+ for (auto& prop : properties) {
result[count++] = ∝
}
result[count] = nil;
ASSERT(!isMetaClass());
ASSERT(ISA()->isMetaClass());
- mlist = ISA()->data()->ro->baseMethods();
+ mlist = ISA()->data()->ro()->baseMethods();
if (mlist) {
for (const auto& meth : *mlist) {
const char *name = sel_cname(meth.name);
}
mutex_locker_t lock(runtimeLock);
+ const auto protocols = cls->data()->protocols();
checkIsKnownClass(cls);
ASSERT(cls->isRealized());
- count = cls->data()->protocols.count();
+ count = protocols.count();
if (count > 0) {
result = (Protocol **)malloc((count+1) * sizeof(Protocol *));
count = 0;
- for (const auto& proto : cls->data()->protocols) {
+ for (const auto& proto : protocols) {
result[count++] = (Protocol *)remapProtocol(proto);
}
result[count] = nil;
for (size_t i = 0; i < count; i++) {
Class cls = remapClass(classlist[i]);
if (cls) {
- names[i-shift] = cls->demangledName();
+ names[i-shift] = cls->demangledName(/* needs lock */false);
} else {
shift++; // ignored weak-linked class
}
{
// Handle the easy case directly.
if (isRealized() || isFuture()) {
- if (data()->demangledName) return data()->demangledName;
+ if (!isAnySwift()) {
+ return data()->ro()->name;
+ }
+ auto rwe = data()->ext();
+ if (rwe && rwe->demangledName) {
+ return rwe->demangledName;
+ }
}
char *result;
mutex_t DemangleCacheLock;
static objc::DenseSet<const char *> *DemangleCache;
const char *
-objc_class::demangledName()
+objc_class::demangledName(bool needsLock)
{
+ if (!needsLock) {
+ runtimeLock.assertLocked();
+ }
+
// Return previously demangled name if available.
if (isRealized() || isFuture()) {
- if (data()->demangledName) return data()->demangledName;
+ if (!isAnySwift()) {
+ return data()->ro()->name;
+ }
+ auto rwe = data()->ext();
+ if (rwe && rwe->demangledName) {
+ return rwe->demangledName;
+ }
}
// Try demangling the mangled name.
const char *mangled = mangledName();
char *de = copySwiftV1DemangledName(mangled);
+ class_rw_ext_t *rwe;
+
if (isRealized() || isFuture()) {
- // Class is already realized or future.
+ if (needsLock) {
+ mutex_locker_t lock(runtimeLock);
+ rwe = data()->extAllocIfNeeded();
+ } else {
+ rwe = data()->extAllocIfNeeded();
+ }
+ // Class is already realized or future.
// Save demangling result in rw data.
// We may not own runtimeLock so use an atomic operation instead.
if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangled),
- (void**)&data()->demangledName))
+ (void**)&rwe->demangledName))
{
if (de) free(de);
}
- return data()->demangledName;
+ return rwe->demangledName;
}
// Class is not yet realized.
// Only objc_copyClassNamesForImage() should get here.
// fixme lldb's calls to class_getName() can also get here when
// interrogating the dyld shared cache. (rdar://27258517)
- // fixme runtimeLock.assertLocked();
// fixme ASSERT(realize);
const char *cached;
if (!cls) return "nil";
// fixme lldb calls class_getName() on unrealized classes (rdar://27258517)
// ASSERT(cls->isRealized() || cls->isFuture());
- return cls->demangledName();
+ return cls->demangledName(/* needs lock */true);
}
/***********************************************************************
{
if (!cls) return 0;
ASSERT(cls->isRealized());
- return cls->data()->version;
+ auto rwe = cls->data()->ext();
+ if (rwe) {
+ return rwe->version;
+ }
+ return cls->isMetaClass() ? 7 : 0;
}
{
if (!cls) return;
ASSERT(cls->isRealized());
- cls->data()->version = version;
+ auto rwe = cls->data()->ext();
+ if (!rwe) {
+ mutex_locker_t lock(runtimeLock);
+ rwe = cls->data()->extAllocIfNeeded();
+ }
+
+ rwe->version = version;
}
/***********************************************************************
* method_lists_contains_any
**********************************************************************/
static NEVER_INLINE bool
-method_lists_contains_any(method_list_t **mlists, method_list_t **end,
+method_lists_contains_any(method_list_t * const *mlists, method_list_t * const *end,
SEL sels[], size_t selcount)
{
while (mlists < end) {
// fixme nil cls?
// fixme nil sel?
- for (auto mlists = cls->data()->methods.beginLists(),
- end = cls->data()->methods.endLists();
+ auto const methods = cls->data()->methods();
+ for (auto mlists = methods.beginLists(),
+ end = methods.endLists();
mlists != end;
++mlists)
{
ASSERT(cls->isRealized());
for ( ; cls; cls = cls->superclass) {
- for (auto& prop : cls->data()->properties) {
+ for (auto& prop : cls->data()->properties()) {
if (0 == strcmp(name, prop.name)) {
return (objc_property_t)∝
}
const uint8_t *
class_getIvarLayout(Class cls)
{
- if (cls) return cls->data()->ro->ivarLayout;
+ if (cls) return cls->data()->ro()->ivarLayout;
else return nil;
}
const uint8_t *
class_getWeakIvarLayout(Class cls)
{
- if (cls) return cls->data()->ro->weakIvarLayout;
+ if (cls) return cls->data()->ro()->weakIvarLayout;
else return nil;
}
const ivar_list_t *ivars;
ASSERT(cls->isRealized());
- if ((ivars = cls->data()->ro->ivars)) {
+ if ((ivars = cls->data()->ro()->ivars)) {
for (auto& ivar : *ivars) {
if (!ivar.offset) continue; // anonymous bitfield
mutex_locker_t lock(runtimeLock);
for ( ; cls; cls = cls->superclass) {
- if (auto ivars = cls->data()->ro->ivars) {
+ if (auto ivars = cls->data()->ro()->ivars) {
if (ivars->containsIvar(ivar)) {
return cls;
}
ASSERT(cls->isRealized());
- for (const auto& proto_ref : cls->data()->protocols) {
+ for (const auto& proto_ref : cls->data()->protocols()) {
protocol_t *p = remapProtocol(proto_ref);
if (p == proto || protocol_conformsToProtocol_nolock(p, proto)) {
return YES;
result = _method_setImplementation(cls, m, imp);
}
} else {
+ auto rwe = cls->data()->extAllocIfNeeded();
+
// fixme optimize
method_list_t *newlist;
newlist = (method_list_t *)calloc(sizeof(*newlist), 1);
newlist->first.imp = imp;
prepareMethodLists(cls, &newlist, 1, NO, NO);
- cls->data()->methods.attachLists(&newlist, 1);
+ rwe->methods.attachLists(&newlist, 1);
flushCaches(cls);
result = nil;
}
if (newlist->count > 0) {
+ auto rwe = cls->data()->extAllocIfNeeded();
+
// fixme resize newlist because it may have been over-allocated above.
// Note that realloc() alone doesn't work due to ptrauth.
std::stable_sort(newlist->begin(), newlist->end(), sorter);
prepareMethodLists(cls, &newlist, 1, NO, NO);
- cls->data()->methods.attachLists(&newlist, 1);
+ rwe->methods.attachLists(&newlist, 1);
flushCaches(cls);
} else {
// Attaching the method list to the class consumes it. If we don't
// fixme allocate less memory here
ivar_list_t *oldlist, *newlist;
- if ((oldlist = (ivar_list_t *)cls->data()->ro->ivars)) {
+ if ((oldlist = (ivar_list_t *)cls->data()->ro()->ivars)) {
size_t oldsize = oldlist->byteSize();
newlist = (ivar_list_t *)calloc(oldsize + oldlist->entsize(), 1);
memcpy(newlist, oldlist, oldsize);
if (class_conformsToProtocol(cls, protocol_gen)) return NO;
mutex_locker_t lock(runtimeLock);
+ auto rwe = cls->data()->extAllocIfNeeded();
ASSERT(cls->isRealized());
protolist->count = 1;
protolist->list[0] = (protocol_ref_t)protocol;
- cls->data()->protocols.attachLists(&protolist, 1);
+ rwe->protocols.attachLists(&protolist, 1);
// fixme metaclass?
}
else {
mutex_locker_t lock(runtimeLock);
+ auto rwe = cls->data()->extAllocIfNeeded();
ASSERT(cls->isRealized());
proplist->first.name = strdupIfMutable(name);
proplist->first.attributes = copyPropertyAttributeString(attrs, count);
- cls->data()->properties.attachLists(&proplist, 1);
+ rwe->properties.attachLists(&proplist, 1);
return YES;
}
checkIsKnownClass(original);
+ auto orig_rw = original->data();
+ auto orig_rwe = orig_rw->ext();
+ auto orig_ro = orig_rw->ro();
+
ASSERT(original->isRealized());
ASSERT(!original->isMetaClass());
duplicate->cache.initializeToEmpty();
- class_rw_t *rw = (class_rw_t *)calloc(sizeof(*original->data()), 1);
- rw->flags = (original->data()->flags | RW_COPIED_RO | RW_REALIZING);
- rw->version = original->data()->version;
+ class_rw_t *rw = objc::zalloc<class_rw_t>();
+ rw->flags = (orig_rw->flags | RW_COPIED_RO | RW_REALIZING);
rw->firstSubclass = nil;
rw->nextSiblingClass = nil;
duplicate->bits = original->bits;
duplicate->setData(rw);
- rw->ro = original->data()->ro->duplicate();
- *(char **)&rw->ro->name = strdupIfMutable(name);
+ auto ro = orig_ro->duplicate();
+ *(char **)&ro->name = strdupIfMutable(name);
+ rw->set_ro(ro);
- rw->methods = original->data()->methods.duplicate();
+ if (orig_rwe) {
+ auto rwe = rw->extAllocIfNeeded();
+ rwe->version = orig_rwe->version;
+ rwe->methods = orig_rwe->methods.duplicate();
- // fixme dies when categories are added to the base
- rw->properties = original->data()->properties;
- rw->protocols = original->data()->protocols;
+ // fixme dies when categories are added to the base
+ rwe->properties = orig_rwe->properties;
+ rwe->protocols = orig_rwe->protocols;
+ } else if (ro->baseMethods()) {
+ // if we have base methods, we need to make a deep copy
+ // which requires a class_rw_ext_t to be allocated
+ rw->deepCopy(ro);
+ }
duplicate->chooseClassArrayIndex();
// Don't methodize class - construction above is correct
- addNamedClass(duplicate, duplicate->data()->ro->name);
+ addNamedClass(duplicate, ro->name);
addClassTableEntry(duplicate, /*addMeta=*/false);
if (PrintConnecting) {
_objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p",
- name, original->nameForLogging(),
- (void*)duplicate, duplicate->data()->ro);
+ name, original->nameForLogging(), (void*)duplicate, ro);
}
duplicate->clearInfo(RW_REALIZING);
runtimeLock.assertLocked();
class_ro_t *cls_ro_w, *meta_ro_w;
+ class_rw_t *cls_rw_w, *meta_rw_w;
- cls->setData((class_rw_t *)calloc(sizeof(class_rw_t), 1));
- meta->setData((class_rw_t *)calloc(sizeof(class_rw_t), 1));
+ cls_rw_w = objc::zalloc<class_rw_t>();
+ meta_rw_w = objc::zalloc<class_rw_t>();
cls_ro_w = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
meta_ro_w = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
- cls->data()->ro = cls_ro_w;
- meta->data()->ro = meta_ro_w;
+
+ cls->setData(cls_rw_w);
+ cls_rw_w->set_ro(cls_ro_w);
+ meta->setData(meta_rw_w);
+ meta_rw_w->set_ro(meta_ro_w);
// Set basic info
- cls->data()->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
- meta->data()->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
- cls->data()->version = 0;
- meta->data()->version = 7;
+ cls_rw_w->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
+ meta_rw_w->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING | RW_META;
cls_ro_w->flags = 0;
meta_ro_w->flags = RO_META;
- if (!superclass) {
- cls_ro_w->flags |= RO_ROOT;
- meta_ro_w->flags |= RO_ROOT;
- }
if (superclass) {
uint32_t flagsToCopy = RW_FORBIDS_ASSOCIATED_OBJECTS;
- cls->data()->flags |= superclass->data()->flags & flagsToCopy;
+ cls_rw_w->flags |= superclass->data()->flags & flagsToCopy;
cls_ro_w->instanceStart = superclass->unalignedInstanceSize();
meta_ro_w->instanceStart = superclass->ISA()->unalignedInstanceSize();
cls->setInstanceSize(cls_ro_w->instanceStart);
meta->setInstanceSize(meta_ro_w->instanceStart);
} else {
+ cls_ro_w->flags |= RO_ROOT;
+ meta_ro_w->flags |= RO_ROOT;
cls_ro_w->instanceStart = 0;
meta_ro_w->instanceStart = (uint32_t)sizeof(objc_class);
cls->setInstanceSize((uint32_t)sizeof(id)); // just an isa
(cls->ISA()->data()->flags & RW_CONSTRUCTED))
{
_objc_inform("objc_registerClassPair: class '%s' was already "
- "registered!", cls->data()->ro->name);
+ "registered!", cls->data()->ro()->name);
return;
}
{
_objc_inform("objc_registerClassPair: class '%s' was not "
"allocated with objc_allocateClassPair!",
- cls->data()->ro->name);
+ cls->data()->ro()->name);
return;
}
cls->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
// Add to named class table.
- addNamedClass(cls, cls->data()->ro->name);
+ addNamedClass(cls, cls->data()->ro()->name);
}
if (! cls->isRealized()) return;
auto rw = cls->data();
- auto ro = rw->ro;
+ auto rwe = rw->ext();
+ auto ro = rw->ro();
cache_delete(cls);
-
- for (auto& meth : rw->methods) {
- try_free(meth.types);
+
+ if (rwe) {
+ for (auto& meth : rwe->methods) {
+ try_free(meth.types);
+ }
+ rwe->methods.tryFree();
}
- rw->methods.tryFree();
const ivar_list_t *ivars = ro->ivars;
if (ivars) {
try_free(ivars);
}
- for (auto& prop : rw->properties) {
- try_free(prop.name);
- try_free(prop.attributes);
- }
- rw->properties.tryFree();
+ if (rwe) {
+ for (auto& prop : rwe->properties) {
+ try_free(prop.name);
+ try_free(prop.attributes);
+ }
+ rwe->properties.tryFree();
- rw->protocols.tryFree();
+ rwe->protocols.tryFree();
+ }
try_free(ro->ivarLayout);
try_free(ro->weakIvarLayout);
try_free(ro->name);
try_free(ro);
- try_free(rw);
+ objc::zfree(rwe);
+ objc::zfree(rw);
try_free(cls);
}
// disposing still-unregistered class is OK!
_objc_inform("objc_disposeClassPair: class '%s' was not "
"allocated with objc_allocateClassPair!",
- cls->data()->ro->name);
+ cls->data()->ro()->name);
return;
}
if (cls->isMetaClass()) {
_objc_inform("objc_disposeClassPair: class '%s' is a metaclass, "
- "not a class!", cls->data()->ro->name);
+ "not a class!", cls->data()->ro()->name);
return;
}
// Shouldn't have any live subclasses.
if (cls->data()->firstSubclass) {
_objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
- "including '%s'!", cls->data()->ro->name,
+ "including '%s'!", cls->data()->ro()->name,
cls->data()->firstSubclass->nameForLogging());
}
if (cls->ISA()->data()->firstSubclass) {
_objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
- "including '%s'!", cls->data()->ro->name,
+ "including '%s'!", cls->data()->ro()->name,
cls->ISA()->data()->firstSubclass->nameForLogging());
}